// NOTE(review): The following provenance text was scraped from a Maven artifact
// listing page and was left as bare (non-Java) text at the top of the file; it is
// preserved here as a comment so the file stays syntactically valid:
//   Artifact: org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos (Maven / Gradle / Ivy)
//   A newer version of this artifact exists: 3.4.0
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: server/application_history_server.proto

package org.apache.hadoop.yarn.proto;

public final class ApplicationHistoryServerProtos {
  // Private constructor: this generated outer class is a namespace holder only.
  private ApplicationHistoryServerProtos() {}
  /**
   * Registers proto extensions into the given lite registry. The source proto
   * file declares no extensions, so this is intentionally a no-op.
   */
  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
  }

  /**
   * Registers proto extensions into the given full registry by delegating to the
   * {@code ExtensionRegistryLite} overload (which is a no-op for this proto file).
   */
  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
  }
  /**
   * Read-only accessor contract for the {@code hadoop.yarn.ApplicationHistoryDataProto}
   * message. Every optional field exposes a {@code has*} presence check paired with a
   * {@code get*} accessor; string fields additionally expose a raw {@code ByteString}
   * variant of the getter.
   */
  public interface ApplicationHistoryDataProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationHistoryDataProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     */
    boolean hasApplicationId();
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();

    /**
     * optional string application_name = 2;
     */
    boolean hasApplicationName();
    /**
     * optional string application_name = 2;
     */
    java.lang.String getApplicationName();
    /**
     * optional string application_name = 2;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationNameBytes();

    /**
     * optional string application_type = 3;
     */
    boolean hasApplicationType();
    /**
     * optional string application_type = 3;
     */
    java.lang.String getApplicationType();
    /**
     * optional string application_type = 3;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTypeBytes();

    /**
     * optional string user = 4;
     */
    boolean hasUser();
    /**
     * optional string user = 4;
     */
    java.lang.String getUser();
    /**
     * optional string user = 4;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getUserBytes();

    /**
     * optional string queue = 5;
     */
    boolean hasQueue();
    /**
     * optional string queue = 5;
     */
    java.lang.String getQueue();
    /**
     * optional string queue = 5;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueBytes();

    /**
     * optional int64 submit_time = 6;
     */
    boolean hasSubmitTime();
    /**
     * optional int64 submit_time = 6;
     */
    long getSubmitTime();

    /**
     * optional int64 start_time = 7;
     */
    boolean hasStartTime();
    /**
     * optional int64 start_time = 7;
     */
    long getStartTime();

    /**
     * optional int64 finish_time = 8;
     */
    boolean hasFinishTime();
    /**
     * optional int64 finish_time = 8;
     */
    long getFinishTime();

    /**
     * optional string diagnostics_info = 9;
     */
    boolean hasDiagnosticsInfo();
    /**
     * optional string diagnostics_info = 9;
     */
    java.lang.String getDiagnosticsInfo();
    /**
     * optional string diagnostics_info = 9;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsInfoBytes();

    /**
     * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 10;
     */
    boolean hasFinalApplicationStatus();
    /**
     * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 10;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus();

    /**
     * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 11;
     */
    boolean hasYarnApplicationState();
    /**
     * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 11;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getYarnApplicationState();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ApplicationHistoryDataProto}
   */
  public  static final class ApplicationHistoryDataProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationHistoryDataProto)
      ApplicationHistoryDataProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use ApplicationHistoryDataProto.newBuilder() to construct.
    private ApplicationHistoryDataProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) {
      super(builder);
    }
    // Default-instance constructor: string fields start as "" and the enum-backed
    // int fields start at their numeric proto defaults (0 and 1 respectively;
    // presumably the first declared value of each enum — see the getter fallbacks).
    private ApplicationHistoryDataProto() {
      applicationName_ = "";
      applicationType_ = "";
      user_ = "";
      queue_ = "";
      diagnosticsInfo_ = "";
      finalApplicationStatus_ = 0;
      yarnApplicationState_ = 1;
    }

    /** Returns the set of fields seen on the wire that this schema does not know. */
    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs from {@code input}
     * until end of stream (tag 0), filling fields and setting the matching
     * presence bit in {@code bitField0_} for each field seen. Unrecognized tags
     * and unknown enum numbers are preserved in the unknown-field set rather
     * than dropped.
     *
     * @throws InvalidProtocolBufferException on malformed input; the partially
     *         parsed message is attached via {@code setUnfinishedMessage}.
     */
    private ApplicationHistoryDataProto(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      // Local builder shadows the instance field of the same name; the built
      // result is assigned to the field in the finally block below.
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Case labels are precomputed tags: (field_number << 3) | wire_type.
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              // application_id = 1 (length-delimited message). If the field was
              // already set, merge the new message into the existing one.
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) != 0)) {
                subBuilder = applicationId_.toBuilder();
              }
              applicationId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(applicationId_);
                applicationId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // application_name = 2 (string, stored lazily as ByteString).
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000002;
              applicationName_ = bs;
              break;
            }
            case 26: {
              // application_type = 3 (string).
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000004;
              applicationType_ = bs;
              break;
            }
            case 34: {
              // user = 4 (string).
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000008;
              user_ = bs;
              break;
            }
            case 42: {
              // queue = 5 (string).
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000010;
              queue_ = bs;
              break;
            }
            case 48: {
              // submit_time = 6 (int64 varint).
              bitField0_ |= 0x00000020;
              submitTime_ = input.readInt64();
              break;
            }
            case 56: {
              // start_time = 7 (int64 varint).
              bitField0_ |= 0x00000040;
              startTime_ = input.readInt64();
              break;
            }
            case 64: {
              // finish_time = 8 (int64 varint).
              bitField0_ |= 0x00000080;
              finishTime_ = input.readInt64();
              break;
            }
            case 74: {
              // diagnostics_info = 9 (string).
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000100;
              diagnosticsInfo_ = bs;
              break;
            }
            case 80: {
              // final_application_status = 10 (enum). Unknown numeric values are
              // routed to the unknown-field set instead of being stored.
              int rawValue = input.readEnum();
                @SuppressWarnings("deprecation")
              org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto value = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(10, rawValue);
              } else {
                bitField0_ |= 0x00000200;
                finalApplicationStatus_ = rawValue;
              }
              break;
            }
            case 88: {
              // yarn_application_state = 11 (enum); same unknown-value handling.
              int rawValue = input.readEnum();
                @SuppressWarnings("deprecation")
              org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto value = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(11, rawValue);
              } else {
                bitField0_ |= 0x00000400;
                yarnApplicationState_ = rawValue;
              }
              break;
            }
            default: {
              // Unknown field: preserve it; a false return means end of group/stream.
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        // Always publish whatever unknown fields were collected, even on error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the message descriptor for {@code hadoop.yarn.ApplicationHistoryDataProto}. */
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationHistoryDataProto_descriptor;
    }

    /** Binds the descriptor's fields to this class and its Builder for reflective access. */
    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationHistoryDataProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto.Builder.class);
    }

    // Presence bitmask: bit k-1 set means optional field number k was explicitly set.
    private int bitField0_;
    public static final int APPLICATION_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     */
    public boolean hasApplicationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     * Never returns null: falls back to the default instance when unset.
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }

    public static final int APPLICATION_NAME_FIELD_NUMBER = 2;
    // Holds either a String or a ByteString; parsed bytes are decoded lazily and
    // the decoded String is cached back (volatile for safe cross-thread publication).
    private volatile java.lang.Object applicationName_;
    /**
     * optional string application_name = 2;
     */
    public boolean hasApplicationName() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * optional string application_name = 2;
     */
    public java.lang.String getApplicationName() {
      java.lang.Object ref = applicationName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes are valid UTF-8, so the
        // original bytes survive for re-serialization of malformed input.
        if (bs.isValidUtf8()) {
          applicationName_ = s;
        }
        return s;
      }
    }
    /**
     * optional string application_name = 2;
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationNameBytes() {
      java.lang.Object ref = applicationName_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        applicationName_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int APPLICATION_TYPE_FIELD_NUMBER = 3;
    // Same lazy String/ByteString caching scheme as applicationName_.
    private volatile java.lang.Object applicationType_;
    /**
     * optional string application_type = 3;
     */
    public boolean hasApplicationType() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * optional string application_type = 3;
     */
    public java.lang.String getApplicationType() {
      java.lang.Object ref = applicationType_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          applicationType_ = s;
        }
        return s;
      }
    }
    /**
     * optional string application_type = 3;
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTypeBytes() {
      java.lang.Object ref = applicationType_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        applicationType_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int USER_FIELD_NUMBER = 4;
    // Same lazy String/ByteString caching scheme as applicationName_.
    private volatile java.lang.Object user_;
    /**
     * optional string user = 4;
     */
    public boolean hasUser() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * optional string user = 4;
     */
    public java.lang.String getUser() {
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          user_ = s;
        }
        return s;
      }
    }
    /**
     * optional string user = 4;
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getUserBytes() {
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        user_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int QUEUE_FIELD_NUMBER = 5;
    // Same lazy String/ByteString caching scheme as applicationName_.
    private volatile java.lang.Object queue_;
    /**
     * optional string queue = 5;
     */
    public boolean hasQueue() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * optional string queue = 5;
     */
    public java.lang.String getQueue() {
      java.lang.Object ref = queue_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          queue_ = s;
        }
        return s;
      }
    }
    /**
     * optional string queue = 5;
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueBytes() {
      java.lang.Object ref = queue_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        queue_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int SUBMIT_TIME_FIELD_NUMBER = 6;
    // NOTE(review): presumably epoch milliseconds, as is conventional in YARN —
    // not verifiable from this file alone.
    private long submitTime_;
    /**
     * optional int64 submit_time = 6;
     */
    public boolean hasSubmitTime() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * optional int64 submit_time = 6;
     */
    public long getSubmitTime() {
      return submitTime_;
    }

    public static final int START_TIME_FIELD_NUMBER = 7;
    private long startTime_;
    /**
     * optional int64 start_time = 7;
     */
    public boolean hasStartTime() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * optional int64 start_time = 7;
     */
    public long getStartTime() {
      return startTime_;
    }

    public static final int FINISH_TIME_FIELD_NUMBER = 8;
    private long finishTime_;
    /**
     * optional int64 finish_time = 8;
     */
    public boolean hasFinishTime() {
      return ((bitField0_ & 0x00000080) != 0);
    }
    /**
     * optional int64 finish_time = 8;
     */
    public long getFinishTime() {
      return finishTime_;
    }

    public static final int DIAGNOSTICS_INFO_FIELD_NUMBER = 9;
    // Lazy String/ByteString cache, same scheme as the other string fields.
    private volatile java.lang.Object diagnosticsInfo_;
    /**
     * optional string diagnostics_info = 9;
     */
    public boolean hasDiagnosticsInfo() {
      return ((bitField0_ & 0x00000100) != 0);
    }
    /**
     * optional string diagnostics_info = 9;
     */
    public java.lang.String getDiagnosticsInfo() {
      java.lang.Object ref = diagnosticsInfo_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          diagnosticsInfo_ = s;
        }
        return s;
      }
    }
    /**
     * optional string diagnostics_info = 9;
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsInfoBytes() {
      java.lang.Object ref = diagnosticsInfo_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        diagnosticsInfo_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int FINAL_APPLICATION_STATUS_FIELD_NUMBER = 10;
    // Stored as the raw enum wire number; mapped to the enum type on access.
    private int finalApplicationStatus_;
    /**
     * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 10;
     */
    public boolean hasFinalApplicationStatus() {
      return ((bitField0_ & 0x00000200) != 0);
    }
    /**
     * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 10;
     * Falls back to APP_UNDEFINED if the stored number maps to no known value.
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus() {
      @SuppressWarnings("deprecation")
      org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.valueOf(finalApplicationStatus_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.APP_UNDEFINED : result;
    }

    public static final int YARN_APPLICATION_STATE_FIELD_NUMBER = 11;
    // Stored as the raw enum wire number; mapped to the enum type on access.
    private int yarnApplicationState_;
    /**
     * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 11;
     */
    public boolean hasYarnApplicationState() {
      return ((bitField0_ & 0x00000400) != 0);
    }
    /**
     * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 11;
     * Falls back to NEW if the stored number maps to no known value.
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getYarnApplicationState() {
      @SuppressWarnings("deprecation")
      org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.valueOf(yarnApplicationState_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.NEW : result;
    }

    // Memoized tri-state: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    /**
     * Always true for this message: every field is optional, so no required-field
     * check is needed. The result is memoized after the first call.
     */
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    /**
     * Serializes only the fields whose presence bit is set, in ascending
     * field-number order (1..11), then appends any preserved unknown fields.
     */
    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, applicationName_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, applicationType_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, user_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, queue_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeInt64(6, submitTime_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeInt64(7, startTime_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        output.writeInt64(8, finishTime_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 9, diagnosticsInfo_);
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        // Enums are written by raw stored number, preserving unknown values.
        output.writeEnum(10, finalApplicationStatus_);
      }
      if (((bitField0_ & 0x00000400) != 0)) {
        output.writeEnum(11, yarnApplicationState_);
      }
      unknownFields.writeTo(output);
    }

    /**
     * Computes the exact serialized byte size of this message (present fields
     * plus unknown fields). The result is memoized in {@code memoizedSize};
     * -1 marks "not yet computed".
     */
    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, applicationName_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, applicationType_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, user_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, queue_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(6, submitTime_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(7, startTime_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(8, finishTime_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(9, diagnosticsInfo_);
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(10, finalApplicationStatus_);
      }
      if (((bitField0_ & 0x00000400) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(11, yarnApplicationState_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    /**
     * Field-by-field equality: two messages are equal when each field has the
     * same presence and, if present, the same value; unknown fields must also
     * match. Enum fields are compared by their raw stored numbers.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto other = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto) obj;

      if (hasApplicationId() != other.hasApplicationId()) return false;
      if (hasApplicationId()) {
        if (!getApplicationId()
            .equals(other.getApplicationId())) return false;
      }
      if (hasApplicationName() != other.hasApplicationName()) return false;
      if (hasApplicationName()) {
        if (!getApplicationName()
            .equals(other.getApplicationName())) return false;
      }
      if (hasApplicationType() != other.hasApplicationType()) return false;
      if (hasApplicationType()) {
        if (!getApplicationType()
            .equals(other.getApplicationType())) return false;
      }
      if (hasUser() != other.hasUser()) return false;
      if (hasUser()) {
        if (!getUser()
            .equals(other.getUser())) return false;
      }
      if (hasQueue() != other.hasQueue()) return false;
      if (hasQueue()) {
        if (!getQueue()
            .equals(other.getQueue())) return false;
      }
      if (hasSubmitTime() != other.hasSubmitTime()) return false;
      if (hasSubmitTime()) {
        if (getSubmitTime()
            != other.getSubmitTime()) return false;
      }
      if (hasStartTime() != other.hasStartTime()) return false;
      if (hasStartTime()) {
        if (getStartTime()
            != other.getStartTime()) return false;
      }
      if (hasFinishTime() != other.hasFinishTime()) return false;
      if (hasFinishTime()) {
        if (getFinishTime()
            != other.getFinishTime()) return false;
      }
      if (hasDiagnosticsInfo() != other.hasDiagnosticsInfo()) return false;
      if (hasDiagnosticsInfo()) {
        if (!getDiagnosticsInfo()
            .equals(other.getDiagnosticsInfo())) return false;
      }
      if (hasFinalApplicationStatus() != other.hasFinalApplicationStatus()) return false;
      if (hasFinalApplicationStatus()) {
        if (finalApplicationStatus_ != other.finalApplicationStatus_) return false;
      }
      if (hasYarnApplicationState() != other.hasYarnApplicationState()) return false;
      if (hasYarnApplicationState()) {
        if (yarnApplicationState_ != other.yarnApplicationState_) return false;
      }
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }

    /**
     * Hash consistent with {@link #equals}: folds in the descriptor, each
     * present field (keyed by its field number), and the unknown-field set.
     * Memoized in {@code memoizedHashCode} (0 = not yet computed).
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationId()) {
        hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationId().hashCode();
      }
      if (hasApplicationName()) {
        hash = (37 * hash) + APPLICATION_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationName().hashCode();
      }
      if (hasApplicationType()) {
        hash = (37 * hash) + APPLICATION_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationType().hashCode();
      }
      if (hasUser()) {
        hash = (37 * hash) + USER_FIELD_NUMBER;
        hash = (53 * hash) + getUser().hashCode();
      }
      if (hasQueue()) {
        hash = (37 * hash) + QUEUE_FIELD_NUMBER;
        hash = (53 * hash) + getQueue().hashCode();
      }
      if (hasSubmitTime()) {
        hash = (37 * hash) + SUBMIT_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getSubmitTime());
      }
      if (hasStartTime()) {
        hash = (37 * hash) + START_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getStartTime());
      }
      if (hasFinishTime()) {
        hash = (37 * hash) + FINISH_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getFinishTime());
      }
      if (hasDiagnosticsInfo()) {
        hash = (37 * hash) + DIAGNOSTICS_INFO_FIELD_NUMBER;
        hash = (53 * hash) + getDiagnosticsInfo().hashCode();
      }
      if (hasFinalApplicationStatus()) {
        hash = (37 * hash) + FINAL_APPLICATION_STATUS_FIELD_NUMBER;
        hash = (53 * hash) + finalApplicationStatus_;
      }
      if (hasYarnApplicationState()) {
        hash = (37 * hash) + YARN_APPLICATION_STATE_FIELD_NUMBER;
        hash = (53 * hash) + yarnApplicationState_;
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // --- Standard generated parse entry points. All overloads delegate to the
    // --- message PARSER; stream variants route through GeneratedMessageV3 helpers
    // --- that convert IOExceptions appropriately. The "Delimited" forms expect a
    // --- varint length prefix before the message bytes.
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    // Builder factory methods: all paths ultimately derive a Builder from
    // DEFAULT_INSTANCE so construction starts from the empty message state.
    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    // Returns a builder pre-populated with the set fields of {@code prototype}.
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      // Skip the mergeFrom copy when this is the (empty) default instance.
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    // Internal hook used by parent builders to create a child Builder that
    // reports changes back to {@code parent}.
    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ApplicationHistoryDataProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationHistoryDataProto)
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProtoOrBuilder {
      // Descriptor plumbing: both methods resolve against the static
      // descriptor / field-accessor tables owned by the enclosing
      // ApplicationHistoryServerProtos outer class.
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationHistoryDataProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationHistoryDataProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Child-builder constructor; {@code parent} receives change
      // notifications from this builder.
      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested application_id field builder when
      // alwaysUseFieldBuilders is set (a GeneratedMessageV3 flag —
      // presumably for testing; confirm against the protobuf runtime).
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationIdFieldBuilder();
        }
      }
      // Resets every field to its default value and clears all presence
      // bits in bitField0_ (one bit per field, 0x1 .. 0x400).
      @java.lang.Override
      public Builder clear() {
        super.clear();
        if (applicationIdBuilder_ == null) {
          applicationId_ = null;
        } else {
          applicationIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationName_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        applicationType_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        user_ = "";
        bitField0_ = (bitField0_ & ~0x00000008);
        queue_ = "";
        bitField0_ = (bitField0_ & ~0x00000010);
        submitTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000020);
        startTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000040);
        finishTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000080);
        diagnosticsInfo_ = "";
        bitField0_ = (bitField0_ & ~0x00000100);
        finalApplicationStatus_ = 0;
        bitField0_ = (bitField0_ & ~0x00000200);
        // Note: this enum's default numeric value is 1, unlike
        // final_application_status whose default is 0.
        yarnApplicationState_ = 1;
        bitField0_ = (bitField0_ & ~0x00000400);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationHistoryDataProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto.getDefaultInstance();
      }

      // Builds the message, throwing if it is not fully initialized.
      // (isInitialized() below always returns true for this message, since
      // all of its fields are optional.)
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto build() {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies the builder state into a new message instance, translating
      // the builder's presence bits (from_bitField0_) into the message's
      // presence bits (to_bitField0_). String and enum fields are assigned
      // unconditionally — only their presence bit is conditional — while
      // primitive long fields are copied only when their bit is set.
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto buildPartial() {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto result = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          // application_id comes either from the plain field or from the
          // nested field builder, whichever currently owns the value.
          if (applicationIdBuilder_ == null) {
            result.applicationId_ = applicationId_;
          } else {
            result.applicationId_ = applicationIdBuilder_.build();
          }
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          to_bitField0_ |= 0x00000002;
        }
        result.applicationName_ = applicationName_;
        if (((from_bitField0_ & 0x00000004) != 0)) {
          to_bitField0_ |= 0x00000004;
        }
        result.applicationType_ = applicationType_;
        if (((from_bitField0_ & 0x00000008) != 0)) {
          to_bitField0_ |= 0x00000008;
        }
        result.user_ = user_;
        if (((from_bitField0_ & 0x00000010) != 0)) {
          to_bitField0_ |= 0x00000010;
        }
        result.queue_ = queue_;
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.submitTime_ = submitTime_;
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.startTime_ = startTime_;
          to_bitField0_ |= 0x00000040;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.finishTime_ = finishTime_;
          to_bitField0_ |= 0x00000080;
        }
        if (((from_bitField0_ & 0x00000100) != 0)) {
          to_bitField0_ |= 0x00000100;
        }
        result.diagnosticsInfo_ = diagnosticsInfo_;
        if (((from_bitField0_ & 0x00000200) != 0)) {
          to_bitField0_ |= 0x00000200;
        }
        result.finalApplicationStatus_ = finalApplicationStatus_;
        if (((from_bitField0_ & 0x00000400) != 0)) {
          to_bitField0_ |= 0x00000400;
        }
        result.yarnApplicationState_ = yarnApplicationState_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Reflection-based builder operations: straight delegation to the
      // GeneratedMessageV3.Builder superclass.
      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      // Dispatches to the type-specific mergeFrom when possible; otherwise
      // falls back to reflective merging via the superclass.
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-by-field merge: only fields present in {@code other} are
      // copied. String fields share {@code other}'s backing object
      // (String or ByteString) directly rather than going through setters.
      public Builder mergeFrom(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto other) {
        if (other == org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto.getDefaultInstance()) return this;
        if (other.hasApplicationId()) {
          mergeApplicationId(other.getApplicationId());
        }
        if (other.hasApplicationName()) {
          bitField0_ |= 0x00000002;
          applicationName_ = other.applicationName_;
          onChanged();
        }
        if (other.hasApplicationType()) {
          bitField0_ |= 0x00000004;
          applicationType_ = other.applicationType_;
          onChanged();
        }
        if (other.hasUser()) {
          bitField0_ |= 0x00000008;
          user_ = other.user_;
          onChanged();
        }
        if (other.hasQueue()) {
          bitField0_ |= 0x00000010;
          queue_ = other.queue_;
          onChanged();
        }
        if (other.hasSubmitTime()) {
          setSubmitTime(other.getSubmitTime());
        }
        if (other.hasStartTime()) {
          setStartTime(other.getStartTime());
        }
        if (other.hasFinishTime()) {
          setFinishTime(other.getFinishTime());
        }
        if (other.hasDiagnosticsInfo()) {
          bitField0_ |= 0x00000100;
          diagnosticsInfo_ = other.diagnosticsInfo_;
          onChanged();
        }
        if (other.hasFinalApplicationStatus()) {
          setFinalApplicationStatus(other.getFinalApplicationStatus());
        }
        if (other.hasYarnApplicationState()) {
          setYarnApplicationState(other.getYarnApplicationState());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      // Always initialized: this message declares no required fields.
      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      // Parses from a stream and merges the result into this builder.
      // On parse failure, any partially-parsed message is still merged
      // (via the finally block) before the IOException is rethrown, so
      // successfully read fields are not lost.
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for the builder's fields (one bit per field number).
      private int bitField0_;

      // ---- Field: optional .hadoop.yarn.ApplicationIdProto application_id = 1
      // Either applicationId_ holds the value directly, or (once
      // getApplicationIdFieldBuilder() has been called) applicationIdBuilder_
      // owns it and applicationId_ is null. All accessors branch on which
      // of the two currently owns the value.
      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public boolean hasApplicationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
        if (applicationIdBuilder_ == null) {
          // Never returns null: falls back to the default instance.
          return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        } else {
          return applicationIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationId_ = value;
          onChanged();
        } else {
          applicationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public Builder setApplicationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (applicationIdBuilder_ == null) {
          applicationId_ = builderForValue.build();
          onChanged();
        } else {
          applicationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          // Merge into the existing value only if one is already present
          // and non-default; otherwise just adopt the incoming value.
          if (((bitField0_ & 0x00000001) != 0) &&
              applicationId_ != null &&
              applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            applicationId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.newBuilder(applicationId_).mergeFrom(value).buildPartial();
          } else {
            applicationId_ = value;
          }
          onChanged();
        } else {
          applicationIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public Builder clearApplicationId() {
        if (applicationIdBuilder_ == null) {
          applicationId_ = null;
          onChanged();
        } else {
          applicationIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
        // Marks the field present and hands out a mutable nested builder.
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
        if (applicationIdBuilder_ != null) {
          return applicationIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      // Lazily creates the SingleFieldBuilderV3; once created, it takes
      // ownership of the current value and applicationId_ is nulled out.
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getApplicationIdFieldBuilder() {
        if (applicationIdBuilder_ == null) {
          applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getApplicationId(),
                  getParentForChildren(),
                  isClean());
          applicationId_ = null;
        }
        return applicationIdBuilder_;
      }

      // ---- Field: optional string application_name = 2
      // Stored as Object holding either a String or a ByteString; each
      // getter lazily converts to the representation it needs and caches
      // the result back into the field when safe.
      private java.lang.Object applicationName_ = "";
      /**
       * optional string application_name = 2;
       */
      public boolean hasApplicationName() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional string application_name = 2;
       */
      public java.lang.String getApplicationName() {
        java.lang.Object ref = applicationName_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          // Cache the decoded String only when the bytes are valid UTF-8,
          // so a later getApplicationNameBytes() can't lose information.
          if (bs.isValidUtf8()) {
            applicationName_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string application_name = 2;
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getApplicationNameBytes() {
        java.lang.Object ref = applicationName_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          applicationName_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string application_name = 2;
       */
      public Builder setApplicationName(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        applicationName_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string application_name = 2;
       */
      public Builder clearApplicationName() {
        bitField0_ = (bitField0_ & ~0x00000002);
        applicationName_ = getDefaultInstance().getApplicationName();
        onChanged();
        return this;
      }
      /**
       * optional string application_name = 2;
       */
      public Builder setApplicationNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        applicationName_ = value;
        onChanged();
        return this;
      }

      // ---- Field: optional string application_type = 3
      // Same lazy String/ByteString storage scheme as application_name.
      private java.lang.Object applicationType_ = "";
      /**
       * optional string application_type = 3;
       */
      public boolean hasApplicationType() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * optional string application_type = 3;
       */
      public java.lang.String getApplicationType() {
        java.lang.Object ref = applicationType_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            applicationType_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string application_type = 3;
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getApplicationTypeBytes() {
        java.lang.Object ref = applicationType_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          applicationType_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string application_type = 3;
       */
      public Builder setApplicationType(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        applicationType_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string application_type = 3;
       */
      public Builder clearApplicationType() {
        bitField0_ = (bitField0_ & ~0x00000004);
        applicationType_ = getDefaultInstance().getApplicationType();
        onChanged();
        return this;
      }
      /**
       * optional string application_type = 3;
       */
      public Builder setApplicationTypeBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        applicationType_ = value;
        onChanged();
        return this;
      }

      // ---- Field: optional string user = 4
      // Same lazy String/ByteString storage scheme as application_name.
      private java.lang.Object user_ = "";
      /**
       * optional string user = 4;
       */
      public boolean hasUser() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * optional string user = 4;
       */
      public java.lang.String getUser() {
        java.lang.Object ref = user_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            user_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string user = 4;
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getUserBytes() {
        java.lang.Object ref = user_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          user_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string user = 4;
       */
      public Builder setUser(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        user_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string user = 4;
       */
      public Builder clearUser() {
        bitField0_ = (bitField0_ & ~0x00000008);
        user_ = getDefaultInstance().getUser();
        onChanged();
        return this;
      }
      /**
       * optional string user = 4;
       */
      public Builder setUserBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        user_ = value;
        onChanged();
        return this;
      }

      // ---- Field: optional string queue = 5
      // Same lazy String/ByteString storage scheme as application_name.
      private java.lang.Object queue_ = "";
      /**
       * optional string queue = 5;
       */
      public boolean hasQueue() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * optional string queue = 5;
       */
      public java.lang.String getQueue() {
        java.lang.Object ref = queue_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            queue_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string queue = 5;
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getQueueBytes() {
        java.lang.Object ref = queue_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          queue_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string queue = 5;
       */
      public Builder setQueue(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
        queue_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string queue = 5;
       */
      public Builder clearQueue() {
        bitField0_ = (bitField0_ & ~0x00000010);
        queue_ = getDefaultInstance().getQueue();
        onChanged();
        return this;
      }
      /**
       * optional string queue = 5;
       */
      public Builder setQueueBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
        queue_ = value;
        onChanged();
        return this;
      }

      // ---- Field: optional int64 submit_time = 6
      // Plain long storage with presence tracked in bit 0x20.
      private long submitTime_ ;
      /**
       * optional int64 submit_time = 6;
       */
      public boolean hasSubmitTime() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * optional int64 submit_time = 6;
       */
      public long getSubmitTime() {
        return submitTime_;
      }
      /**
       * optional int64 submit_time = 6;
       */
      public Builder setSubmitTime(long value) {
        bitField0_ |= 0x00000020;
        submitTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 submit_time = 6;
       */
      public Builder clearSubmitTime() {
        bitField0_ = (bitField0_ & ~0x00000020);
        submitTime_ = 0L;
        onChanged();
        return this;
      }

      // ---- Field: optional int64 start_time = 7
      // Plain long storage with presence tracked in bit 0x40.
      private long startTime_ ;
      /**
       * optional int64 start_time = 7;
       */
      public boolean hasStartTime() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * optional int64 start_time = 7;
       */
      public long getStartTime() {
        return startTime_;
      }
      /**
       * optional int64 start_time = 7;
       */
      public Builder setStartTime(long value) {
        bitField0_ |= 0x00000040;
        startTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 start_time = 7;
       */
      public Builder clearStartTime() {
        bitField0_ = (bitField0_ & ~0x00000040);
        startTime_ = 0L;
        onChanged();
        return this;
      }

      // ---- Field: optional int64 finish_time = 8
      // Plain long storage with presence tracked in bit 0x80.
      private long finishTime_ ;
      /**
       * optional int64 finish_time = 8;
       */
      public boolean hasFinishTime() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * optional int64 finish_time = 8;
       */
      public long getFinishTime() {
        return finishTime_;
      }
      /**
       * optional int64 finish_time = 8;
       */
      public Builder setFinishTime(long value) {
        bitField0_ |= 0x00000080;
        finishTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 finish_time = 8;
       */
      public Builder clearFinishTime() {
        bitField0_ = (bitField0_ & ~0x00000080);
        finishTime_ = 0L;
        onChanged();
        return this;
      }

      // ---- Field: optional string diagnostics_info = 9
      // Same lazy String/ByteString storage scheme as application_name.
      private java.lang.Object diagnosticsInfo_ = "";
      /**
       * optional string diagnostics_info = 9;
       */
      public boolean hasDiagnosticsInfo() {
        return ((bitField0_ & 0x00000100) != 0);
      }
      /**
       * optional string diagnostics_info = 9;
       */
      public java.lang.String getDiagnosticsInfo() {
        java.lang.Object ref = diagnosticsInfo_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            diagnosticsInfo_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string diagnostics_info = 9;
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDiagnosticsInfoBytes() {
        java.lang.Object ref = diagnosticsInfo_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          diagnosticsInfo_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string diagnostics_info = 9;
       */
      public Builder setDiagnosticsInfo(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000100;
        diagnosticsInfo_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string diagnostics_info = 9;
       */
      public Builder clearDiagnosticsInfo() {
        bitField0_ = (bitField0_ & ~0x00000100);
        diagnosticsInfo_ = getDefaultInstance().getDiagnosticsInfo();
        onChanged();
        return this;
      }
      /**
       * optional string diagnostics_info = 9;
       */
      public Builder setDiagnosticsInfoBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000100;
        diagnosticsInfo_ = value;
        onChanged();
        return this;
      }

      private int finalApplicationStatus_ = 0;
      /**
       * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 10;
       */
      public boolean hasFinalApplicationStatus() {
        return ((bitField0_ & 0x00000200) != 0);
      }
      /**
       * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 10;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus() {
        @SuppressWarnings("deprecation")
        org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.valueOf(finalApplicationStatus_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.APP_UNDEFINED : result;
      }
      /**
       * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 10;
       */
      public Builder setFinalApplicationStatus(org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000200;
        finalApplicationStatus_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 10;
       */
      public Builder clearFinalApplicationStatus() {
        bitField0_ = (bitField0_ & ~0x00000200);
        finalApplicationStatus_ = 0;
        onChanged();
        return this;
      }

      private int yarnApplicationState_ = 1;
      /**
       * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 11;
       */
      public boolean hasYarnApplicationState() {
        return ((bitField0_ & 0x00000400) != 0);
      }
      /**
       * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 11;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getYarnApplicationState() {
        @SuppressWarnings("deprecation")
        org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.valueOf(yarnApplicationState_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.NEW : result;
      }
      /**
       * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 11;
       */
      public Builder setYarnApplicationState(org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000400;
        yarnApplicationState_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 11;
       */
      public Builder clearYarnApplicationState() {
        bitField0_ = (bitField0_ & ~0x00000400);
        yarnApplicationState_ = 1;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationHistoryDataProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationHistoryDataProto)
    private static final org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto();
    }

    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
      @java.lang.Override
      public ApplicationHistoryDataProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        return new ApplicationHistoryDataProto(input, extensionRegistry);
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationHistoryDataProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface ApplicationStartDataProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationStartDataProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     */
    boolean hasApplicationId();
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();

    /**
     * optional string application_name = 2;
     */
    boolean hasApplicationName();
    /**
     * optional string application_name = 2;
     */
    java.lang.String getApplicationName();
    /**
     * optional string application_name = 2;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationNameBytes();

    /**
     * optional string application_type = 3;
     */
    boolean hasApplicationType();
    /**
     * optional string application_type = 3;
     */
    java.lang.String getApplicationType();
    /**
     * optional string application_type = 3;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTypeBytes();

    /**
     * optional string user = 4;
     */
    boolean hasUser();
    /**
     * optional string user = 4;
     */
    java.lang.String getUser();
    /**
     * optional string user = 4;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getUserBytes();

    /**
     * optional string queue = 5;
     */
    boolean hasQueue();
    /**
     * optional string queue = 5;
     */
    java.lang.String getQueue();
    /**
     * optional string queue = 5;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueBytes();

    /**
     * optional int64 submit_time = 6;
     */
    boolean hasSubmitTime();
    /**
     * optional int64 submit_time = 6;
     */
    long getSubmitTime();

    /**
     * optional int64 start_time = 7;
     */
    boolean hasStartTime();
    /**
     * optional int64 start_time = 7;
     */
    long getStartTime();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ApplicationStartDataProto}
   */
  public  static final class ApplicationStartDataProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationStartDataProto)
      ApplicationStartDataProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use ApplicationStartDataProto.newBuilder() to construct.
    private ApplicationStartDataProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) {
      super(builder);
    }
    private ApplicationStartDataProto() {
      applicationName_ = "";
      applicationType_ = "";
      user_ = "";
      queue_ = "";
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    private ApplicationStartDataProto(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) != 0)) {
                subBuilder = applicationId_.toBuilder();
              }
              applicationId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(applicationId_);
                applicationId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000002;
              applicationName_ = bs;
              break;
            }
            case 26: {
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000004;
              applicationType_ = bs;
              break;
            }
            case 34: {
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000008;
              user_ = bs;
              break;
            }
            case 42: {
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000010;
              queue_ = bs;
              break;
            }
            case 48: {
              bitField0_ |= 0x00000020;
              submitTime_ = input.readInt64();
              break;
            }
            case 56: {
              bitField0_ |= 0x00000040;
              startTime_ = input.readInt64();
              break;
            }
            default: {
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationStartDataProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationStartDataProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     */
    public boolean hasApplicationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }

    public static final int APPLICATION_NAME_FIELD_NUMBER = 2;
    private volatile java.lang.Object applicationName_;
    /**
     * optional string application_name = 2;
     */
    public boolean hasApplicationName() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * optional string application_name = 2;
     */
    public java.lang.String getApplicationName() {
      java.lang.Object ref = applicationName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          applicationName_ = s;
        }
        return s;
      }
    }
    /**
     * optional string application_name = 2;
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationNameBytes() {
      java.lang.Object ref = applicationName_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        applicationName_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int APPLICATION_TYPE_FIELD_NUMBER = 3;
    private volatile java.lang.Object applicationType_;
    /**
     * optional string application_type = 3;
     */
    public boolean hasApplicationType() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * optional string application_type = 3;
     */
    public java.lang.String getApplicationType() {
      java.lang.Object ref = applicationType_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          applicationType_ = s;
        }
        return s;
      }
    }
    /**
     * optional string application_type = 3;
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTypeBytes() {
      java.lang.Object ref = applicationType_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        applicationType_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int USER_FIELD_NUMBER = 4;
    private volatile java.lang.Object user_;
    /**
     * optional string user = 4;
     */
    public boolean hasUser() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * optional string user = 4;
     */
    public java.lang.String getUser() {
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          user_ = s;
        }
        return s;
      }
    }
    /**
     * optional string user = 4;
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getUserBytes() {
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        user_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int QUEUE_FIELD_NUMBER = 5;
    private volatile java.lang.Object queue_;
    /**
     * optional string queue = 5;
     */
    public boolean hasQueue() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * optional string queue = 5;
     */
    public java.lang.String getQueue() {
      java.lang.Object ref = queue_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          queue_ = s;
        }
        return s;
      }
    }
    /**
     * optional string queue = 5;
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueBytes() {
      java.lang.Object ref = queue_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        queue_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int SUBMIT_TIME_FIELD_NUMBER = 6;
    private long submitTime_;
    /**
     * optional int64 submit_time = 6;
     */
    public boolean hasSubmitTime() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * optional int64 submit_time = 6;
     */
    public long getSubmitTime() {
      return submitTime_;
    }

    public static final int START_TIME_FIELD_NUMBER = 7;
    private long startTime_;
    /**
     * optional int64 start_time = 7;
     */
    public boolean hasStartTime() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * optional int64 start_time = 7;
     */
    public long getStartTime() {
      return startTime_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, applicationName_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, applicationType_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, user_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, queue_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeInt64(6, submitTime_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeInt64(7, startTime_);
      }
      unknownFields.writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, applicationName_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, applicationType_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, user_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, queue_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(6, submitTime_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(7, startTime_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto other = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto) obj;

      if (hasApplicationId() != other.hasApplicationId()) return false;
      if (hasApplicationId()) {
        if (!getApplicationId()
            .equals(other.getApplicationId())) return false;
      }
      if (hasApplicationName() != other.hasApplicationName()) return false;
      if (hasApplicationName()) {
        if (!getApplicationName()
            .equals(other.getApplicationName())) return false;
      }
      if (hasApplicationType() != other.hasApplicationType()) return false;
      if (hasApplicationType()) {
        if (!getApplicationType()
            .equals(other.getApplicationType())) return false;
      }
      if (hasUser() != other.hasUser()) return false;
      if (hasUser()) {
        if (!getUser()
            .equals(other.getUser())) return false;
      }
      if (hasQueue() != other.hasQueue()) return false;
      if (hasQueue()) {
        if (!getQueue()
            .equals(other.getQueue())) return false;
      }
      if (hasSubmitTime() != other.hasSubmitTime()) return false;
      if (hasSubmitTime()) {
        if (getSubmitTime()
            != other.getSubmitTime()) return false;
      }
      if (hasStartTime() != other.hasStartTime()) return false;
      if (hasStartTime()) {
        if (getStartTime()
            != other.getStartTime()) return false;
      }
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationId()) {
        hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationId().hashCode();
      }
      if (hasApplicationName()) {
        hash = (37 * hash) + APPLICATION_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationName().hashCode();
      }
      if (hasApplicationType()) {
        hash = (37 * hash) + APPLICATION_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationType().hashCode();
      }
      if (hasUser()) {
        hash = (37 * hash) + USER_FIELD_NUMBER;
        hash = (53 * hash) + getUser().hashCode();
      }
      if (hasQueue()) {
        hash = (37 * hash) + QUEUE_FIELD_NUMBER;
        hash = (53 * hash) + getQueue().hashCode();
      }
      if (hasSubmitTime()) {
        hash = (37 * hash) + SUBMIT_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getSubmitTime());
      }
      if (hasStartTime()) {
        hash = (37 * hash) + START_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getStartTime());
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ApplicationStartDataProto}
     *
     * Generated builder for {@code ApplicationStartDataProto}. Field presence is
     * tracked in {@code bitField0_} (one bit per optional field, in declaration
     * order). The nested {@code application_id} message is held either as a plain
     * {@code applicationId_} value or, once reflection-style access is used, via
     * a lazily created {@code applicationIdBuilder_} sub-builder.
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationStartDataProto)
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationStartDataProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationStartDataProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested-field builders when the protobuf runtime is
      // configured to always use field builders (alwaysUseFieldBuilders).
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationIdFieldBuilder();
        }
      }
      // Resets every field to its default value and clears all presence bits.
      @java.lang.Override
      public Builder clear() {
        super.clear();
        if (applicationIdBuilder_ == null) {
          applicationId_ = null;
        } else {
          applicationIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationName_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        applicationType_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        user_ = "";
        bitField0_ = (bitField0_ & ~0x00000008);
        queue_ = "";
        bitField0_ = (bitField0_ & ~0x00000010);
        submitTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000020);
        startTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000040);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationStartDataProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto.getDefaultInstance();
      }

      // Builds and verifies initialization (all fields here are optional, so
      // isInitialized() is always true and this never throws in practice).
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto build() {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state into a new message. Presence bits are transferred
      // via to_bitField0_; note the string fields are copied unconditionally —
      // only their presence bit records whether they were explicitly set.
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto buildPartial() {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto result = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          if (applicationIdBuilder_ == null) {
            result.applicationId_ = applicationId_;
          } else {
            result.applicationId_ = applicationIdBuilder_.build();
          }
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          to_bitField0_ |= 0x00000002;
        }
        result.applicationName_ = applicationName_;
        if (((from_bitField0_ & 0x00000004) != 0)) {
          to_bitField0_ |= 0x00000004;
        }
        result.applicationType_ = applicationType_;
        if (((from_bitField0_ & 0x00000008) != 0)) {
          to_bitField0_ |= 0x00000008;
        }
        result.user_ = user_;
        if (((from_bitField0_ & 0x00000010) != 0)) {
          to_bitField0_ |= 0x00000010;
        }
        result.queue_ = queue_;
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.submitTime_ = submitTime_;
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.startTime_ = startTime_;
          to_bitField0_ |= 0x00000040;
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      // Dispatches to the typed mergeFrom when possible, otherwise falls back to
      // reflective merging in the superclass.
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-by-field merge: fields set in 'other' overwrite (scalars/strings)
      // or recursively merge (application_id) into this builder.
      public Builder mergeFrom(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto other) {
        if (other == org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto.getDefaultInstance()) return this;
        if (other.hasApplicationId()) {
          mergeApplicationId(other.getApplicationId());
        }
        if (other.hasApplicationName()) {
          bitField0_ |= 0x00000002;
          applicationName_ = other.applicationName_;
          onChanged();
        }
        if (other.hasApplicationType()) {
          bitField0_ |= 0x00000004;
          applicationType_ = other.applicationType_;
          onChanged();
        }
        if (other.hasUser()) {
          bitField0_ |= 0x00000008;
          user_ = other.user_;
          onChanged();
        }
        if (other.hasQueue()) {
          bitField0_ |= 0x00000010;
          queue_ = other.queue_;
          onChanged();
        }
        if (other.hasSubmitTime()) {
          setSubmitTime(other.getSubmitTime());
        }
        if (other.hasStartTime()) {
          setStartTime(other.getStartTime());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      // Parses from the wire. On InvalidProtocolBufferException the partially
      // parsed message is still merged in (finally block) before rethrowing as
      // an IOException.
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits: 0x01 application_id, 0x02 application_name,
      // 0x04 application_type, 0x08 user, 0x10 queue, 0x20 submit_time,
      // 0x40 start_time.
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public boolean hasApplicationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
        if (applicationIdBuilder_ == null) {
          return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        } else {
          return applicationIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationId_ = value;
          onChanged();
        } else {
          applicationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public Builder setApplicationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (applicationIdBuilder_ == null) {
          applicationId_ = builderForValue.build();
          onChanged();
        } else {
          applicationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       *
       * Merges into an existing non-default value; otherwise replaces outright.
       */
      public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
              applicationId_ != null &&
              applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            applicationId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.newBuilder(applicationId_).mergeFrom(value).buildPartial();
          } else {
            applicationId_ = value;
          }
          onChanged();
        } else {
          applicationIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public Builder clearApplicationId() {
        if (applicationIdBuilder_ == null) {
          applicationId_ = null;
          onChanged();
        } else {
          applicationIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       *
       * Marks the field present and returns a mutable sub-builder for it.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
        if (applicationIdBuilder_ != null) {
          return applicationIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       *
       * Lazily creates the sub-builder; once created it owns the value, so the
       * plain applicationId_ reference is nulled out.
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getApplicationIdFieldBuilder() {
        if (applicationIdBuilder_ == null) {
          applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getApplicationId(),
                  getParentForChildren(),
                  isClean());
          applicationId_ = null;
        }
        return applicationIdBuilder_;
      }

      // String fields are stored as Object: either a decoded String or the raw
      // ByteString off the wire; getters cache the decoded form when valid UTF-8.
      private java.lang.Object applicationName_ = "";
      /**
       * optional string application_name = 2;
       */
      public boolean hasApplicationName() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional string application_name = 2;
       */
      public java.lang.String getApplicationName() {
        java.lang.Object ref = applicationName_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            applicationName_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string application_name = 2;
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getApplicationNameBytes() {
        java.lang.Object ref = applicationName_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          applicationName_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string application_name = 2;
       */
      public Builder setApplicationName(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        applicationName_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string application_name = 2;
       */
      public Builder clearApplicationName() {
        bitField0_ = (bitField0_ & ~0x00000002);
        applicationName_ = getDefaultInstance().getApplicationName();
        onChanged();
        return this;
      }
      /**
       * optional string application_name = 2;
       */
      public Builder setApplicationNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        applicationName_ = value;
        onChanged();
        return this;
      }

      private java.lang.Object applicationType_ = "";
      /**
       * optional string application_type = 3;
       */
      public boolean hasApplicationType() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * optional string application_type = 3;
       */
      public java.lang.String getApplicationType() {
        java.lang.Object ref = applicationType_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            applicationType_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string application_type = 3;
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getApplicationTypeBytes() {
        java.lang.Object ref = applicationType_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          applicationType_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string application_type = 3;
       */
      public Builder setApplicationType(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        applicationType_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string application_type = 3;
       */
      public Builder clearApplicationType() {
        bitField0_ = (bitField0_ & ~0x00000004);
        applicationType_ = getDefaultInstance().getApplicationType();
        onChanged();
        return this;
      }
      /**
       * optional string application_type = 3;
       */
      public Builder setApplicationTypeBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        applicationType_ = value;
        onChanged();
        return this;
      }

      private java.lang.Object user_ = "";
      /**
       * optional string user = 4;
       */
      public boolean hasUser() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * optional string user = 4;
       */
      public java.lang.String getUser() {
        java.lang.Object ref = user_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            user_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string user = 4;
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getUserBytes() {
        java.lang.Object ref = user_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          user_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string user = 4;
       */
      public Builder setUser(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        user_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string user = 4;
       */
      public Builder clearUser() {
        bitField0_ = (bitField0_ & ~0x00000008);
        user_ = getDefaultInstance().getUser();
        onChanged();
        return this;
      }
      /**
       * optional string user = 4;
       */
      public Builder setUserBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        user_ = value;
        onChanged();
        return this;
      }

      private java.lang.Object queue_ = "";
      /**
       * optional string queue = 5;
       */
      public boolean hasQueue() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * optional string queue = 5;
       */
      public java.lang.String getQueue() {
        java.lang.Object ref = queue_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            queue_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string queue = 5;
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getQueueBytes() {
        java.lang.Object ref = queue_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          queue_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string queue = 5;
       */
      public Builder setQueue(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
        queue_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string queue = 5;
       */
      public Builder clearQueue() {
        bitField0_ = (bitField0_ & ~0x00000010);
        queue_ = getDefaultInstance().getQueue();
        onChanged();
        return this;
      }
      /**
       * optional string queue = 5;
       */
      public Builder setQueueBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
        queue_ = value;
        onChanged();
        return this;
      }

      private long submitTime_ ;
      /**
       * optional int64 submit_time = 6;
       */
      public boolean hasSubmitTime() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * optional int64 submit_time = 6;
       */
      public long getSubmitTime() {
        return submitTime_;
      }
      /**
       * optional int64 submit_time = 6;
       */
      public Builder setSubmitTime(long value) {
        bitField0_ |= 0x00000020;
        submitTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 submit_time = 6;
       */
      public Builder clearSubmitTime() {
        bitField0_ = (bitField0_ & ~0x00000020);
        submitTime_ = 0L;
        onChanged();
        return this;
      }

      private long startTime_ ;
      /**
       * optional int64 start_time = 7;
       */
      public boolean hasStartTime() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * optional int64 start_time = 7;
       */
      public long getStartTime() {
        return startTime_;
      }
      /**
       * optional int64 start_time = 7;
       */
      public Builder setStartTime(long value) {
        bitField0_ |= 0x00000040;
        startTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 start_time = 7;
       */
      public Builder clearStartTime() {
        bitField0_ = (bitField0_ & ~0x00000040);
        startTime_ = 0L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationStartDataProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationStartDataProto)
    // Singleton default instance, created eagerly in the static initializer.
    private static final org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto();
    }

    /** Returns the shared immutable default instance of this message type. */
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    // Wire-format parser. Deprecated for direct use — callers should go through
    // parser() / getParserForType() instead of referencing the field.
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
      @java.lang.Override
      public ApplicationStartDataProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        return new ApplicationStartDataProto(input, extensionRegistry);
      }
    };

    /** Preferred accessor for the message parser. */
    public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  /**
   * Read-only accessor contract for {@code hadoop.yarn.ApplicationFinishDataProto}:
   * presence checks ({@code has*}) and getters for application_id, finish_time,
   * diagnostics_info, final_application_status and yarn_application_state.
   */
  public interface ApplicationFinishDataProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationFinishDataProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     */
    boolean hasApplicationId();
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();

    /**
     * optional int64 finish_time = 2;
     */
    boolean hasFinishTime();
    /**
     * optional int64 finish_time = 2;
     */
    long getFinishTime();

    /**
     * optional string diagnostics_info = 3;
     */
    boolean hasDiagnosticsInfo();
    /**
     * optional string diagnostics_info = 3;
     */
    java.lang.String getDiagnosticsInfo();
    /**
     * optional string diagnostics_info = 3;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsInfoBytes();

    /**
     * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 4;
     */
    boolean hasFinalApplicationStatus();
    /**
     * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 4;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus();

    /**
     * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 5;
     */
    boolean hasYarnApplicationState();
    /**
     * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 5;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getYarnApplicationState();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ApplicationFinishDataProto}
   */
  public  static final class ApplicationFinishDataProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationFinishDataProto)
      ApplicationFinishDataProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use ApplicationFinishDataProto.newBuilder() to construct.
    private ApplicationFinishDataProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) {
      super(builder);
    }
    // Default constructor: empty diagnostics; enum fields hold raw enum numbers
    // (0 and 1 are the respective defaults declared in the .proto).
    private ApplicationFinishDataProto() {
      diagnosticsInfo_ = "";
      finalApplicationStatus_ = 0;
      yarnApplicationState_ = 1;
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-parsing constructor. Reads tags until end of message (tag 0) or an
    // unrecognized field that parseUnknownField rejects. Tag values encode
    // (field_number << 3) | wire_type: 10 = field 1 message, 16 = field 2
    // varint, 26 = field 3 length-delimited, 32/40 = fields 4/5 enum varints.
    // Unknown enum numbers are preserved in the unknown-field set rather than
    // dropped. The unknown fields are finalized in the finally block even when
    // parsing fails partway.
    private ApplicationFinishDataProto(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              // application_id: merge into any previously seen value.
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) != 0)) {
                subBuilder = applicationId_.toBuilder();
              }
              applicationId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(applicationId_);
                applicationId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              finishTime_ = input.readInt64();
              break;
            }
            case 26: {
              // diagnostics_info kept as raw bytes; decoded lazily on access.
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000004;
              diagnosticsInfo_ = bs;
              break;
            }
            case 32: {
              int rawValue = input.readEnum();
                @SuppressWarnings("deprecation")
              org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto value = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(4, rawValue);
              } else {
                bitField0_ |= 0x00000008;
                finalApplicationStatus_ = rawValue;
              }
              break;
            }
            case 40: {
              int rawValue = input.readEnum();
                @SuppressWarnings("deprecation")
              org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto value = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(5, rawValue);
              } else {
                bitField0_ |= 0x00000010;
                yarnApplicationState_ = rawValue;
              }
              break;
            }
            default: {
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // NOTE(review): generated protobuf code (file header says "DO NOT EDIT") —
    // change server/application_history_server.proto and regenerate instead of
    // hand-editing these methods.
    // Returns the message descriptor for hadoop.yarn.ApplicationFinishDataProto.
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationFinishDataProto_descriptor;
    }

    // Binds the descriptor's fields to this class and its Builder so the
    // protobuf runtime can access fields reflectively.
    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationFinishDataProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto.Builder.class);
    }

    // Presence bitmask: one bit per optional field (0x1 = application_id,
    // 0x2 = finish_time, 0x4 = diagnostics_info, 0x8 = final_application_status,
    // 0x10 = yarn_application_state).
    private int bitField0_;
    public static final int APPLICATION_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     */
    public boolean hasApplicationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     * Returns the default instance (never null) when the field is unset.
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     * Same value as {@link #getApplicationId()} viewed through the OrBuilder interface.
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }

    public static final int FINISH_TIME_FIELD_NUMBER = 2;
    // Application finish time; presumably epoch milliseconds — TODO confirm against writers.
    private long finishTime_;
    /**
     * optional int64 finish_time = 2;
     */
    public boolean hasFinishTime() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * optional int64 finish_time = 2;
     * Returns 0 when the field is unset.
     */
    public long getFinishTime() {
      return finishTime_;
    }

    public static final int DIAGNOSTICS_INFO_FIELD_NUMBER = 3;
    // Holds either a java.lang.String or a ByteString; the parsed bytes are
    // decoded lazily and the decoded String is cached back into this field.
    private volatile java.lang.Object diagnosticsInfo_;
    /**
     * optional string diagnostics_info = 3;
     */
    public boolean hasDiagnosticsInfo() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * optional string diagnostics_info = 3;
     * Decodes and caches the UTF-8 bytes on first access.
     */
    public java.lang.String getDiagnosticsInfo() {
      java.lang.Object ref = diagnosticsInfo_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes are valid UTF-8;
        // otherwise keep the raw bytes so they round-trip unchanged.
        if (bs.isValidUtf8()) {
          diagnosticsInfo_ = s;
        }
        return s;
      }
    }
    /**
     * optional string diagnostics_info = 3;
     * Returns the field as bytes, caching the UTF-8 encoding of a cached String.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsInfoBytes() {
      java.lang.Object ref = diagnosticsInfo_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        diagnosticsInfo_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int FINAL_APPLICATION_STATUS_FIELD_NUMBER = 4;
    // Enum stored as its raw wire number; mapped to the enum type on read.
    private int finalApplicationStatus_;
    /**
     * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 4;
     */
    public boolean hasFinalApplicationStatus() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 4;
     * Falls back to APP_UNDEFINED when the stored number has no enum mapping.
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus() {
      @SuppressWarnings("deprecation")
      org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.valueOf(finalApplicationStatus_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.APP_UNDEFINED : result;
    }

    public static final int YARN_APPLICATION_STATE_FIELD_NUMBER = 5;
    // Enum stored as its raw wire number; mapped to the enum type on read.
    private int yarnApplicationState_;
    /**
     * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 5;
     */
    public boolean hasYarnApplicationState() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 5;
     * Falls back to NEW when the stored number has no enum mapping.
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getYarnApplicationState() {
      @SuppressWarnings("deprecation")
      org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.valueOf(yarnApplicationState_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.NEW : result;
    }

    // Memoized initialization state: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    /**
     * All fields of this message are optional, so it is always initialized;
     * the result is cached after the first call.
     */
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    /**
     * Serializes the set fields (guarded by the presence bits in bitField0_)
     * in field-number order, followed by any unknown fields.
     */
    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(2, finishTime_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, diagnosticsInfo_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeEnum(4, finalApplicationStatus_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeEnum(5, yarnApplicationState_);
      }
      unknownFields.writeTo(output);
    }

    /**
     * Computes the serialized byte size of the set fields plus unknown fields;
     * the result is memoized (mirrors the field order used by writeTo).
     */
    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(2, finishTime_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, diagnosticsInfo_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(4, finalApplicationStatus_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(5, yarnApplicationState_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    /**
     * Field-wise equality: for each optional field, both messages must agree
     * on presence and, when present, on value; unknown fields must also match.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto other = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto) obj;

      if (hasApplicationId() != other.hasApplicationId()) return false;
      if (hasApplicationId()) {
        if (!getApplicationId()
            .equals(other.getApplicationId())) return false;
      }
      if (hasFinishTime() != other.hasFinishTime()) return false;
      if (hasFinishTime()) {
        if (getFinishTime()
            != other.getFinishTime()) return false;
      }
      if (hasDiagnosticsInfo() != other.hasDiagnosticsInfo()) return false;
      if (hasDiagnosticsInfo()) {
        if (!getDiagnosticsInfo()
            .equals(other.getDiagnosticsInfo())) return false;
      }
      if (hasFinalApplicationStatus() != other.hasFinalApplicationStatus()) return false;
      if (hasFinalApplicationStatus()) {
        // Enums are compared by their raw wire numbers.
        if (finalApplicationStatus_ != other.finalApplicationStatus_) return false;
      }
      if (hasYarnApplicationState() != other.hasYarnApplicationState()) return false;
      if (hasYarnApplicationState()) {
        if (yarnApplicationState_ != other.yarnApplicationState_) return false;
      }
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }

    /**
     * Hash over the descriptor, each present field (tag number mixed with the
     * field value), and the unknown fields; memoized after the first call.
     * Kept consistent with equals(): only present fields contribute.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationId()) {
        hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationId().hashCode();
      }
      if (hasFinishTime()) {
        hash = (37 * hash) + FINISH_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getFinishTime());
      }
      if (hasDiagnosticsInfo()) {
        hash = (37 * hash) + DIAGNOSTICS_INFO_FIELD_NUMBER;
        hash = (53 * hash) + getDiagnosticsInfo().hashCode();
      }
      if (hasFinalApplicationStatus()) {
        hash = (37 * hash) + FINAL_APPLICATION_STATUS_FIELD_NUMBER;
        hash = (53 * hash) + finalApplicationStatus_;
      }
      if (hasYarnApplicationState()) {
        hash = (37 * hash) + YARN_APPLICATION_STATE_FIELD_NUMBER;
        hash = (53 * hash) + yarnApplicationState_;
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // --- Standard generated parse entry points. ---
    // The byte-oriented overloads (ByteBuffer/ByteString/byte[]) throw
    // InvalidProtocolBufferException on malformed input; the stream-oriented
    // overloads delegate to GeneratedMessageV3 helpers and may throw IOException.
    // parseDelimitedFrom expects a varint length prefix before the message.
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    /** Creates a new empty builder (via the default instance). */
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    /** Creates a builder pre-populated from {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    /** Skips the merge step when this is the (all-default) default instance. */
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ApplicationFinishDataProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationFinishDataProto)
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProtoOrBuilder {
      /** Returns the message descriptor shared with the outer message class. */
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationFinishDataProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationFinishDataProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested-message field builders when the runtime is
      // configured to always use field builders (descriptor-based messages).
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationIdFieldBuilder();
        }
      }
      /**
       * Resets every field to its default and clears all presence bits.
       * Note finalApplicationStatus_ resets to 0 and yarnApplicationState_ to 1
       * (the respective enums' first wire numbers).
       */
      @java.lang.Override
      public Builder clear() {
        super.clear();
        if (applicationIdBuilder_ == null) {
          applicationId_ = null;
        } else {
          applicationIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        finishTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        diagnosticsInfo_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        finalApplicationStatus_ = 0;
        bitField0_ = (bitField0_ & ~0x00000008);
        yarnApplicationState_ = 1;
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationFinishDataProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto.getDefaultInstance();
      }

      /** Builds the message, failing if it is not initialized (it always is here). */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto build() {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /**
       * Copies the builder state into a new message, translating the builder's
       * presence bits into the message's bitField0_.  Scalar/string/enum values
       * are copied unconditionally; only their presence bits are conditional.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto buildPartial() {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto result = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          if (applicationIdBuilder_ == null) {
            result.applicationId_ = applicationId_;
          } else {
            result.applicationId_ = applicationIdBuilder_.build();
          }
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.finishTime_ = finishTime_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          to_bitField0_ |= 0x00000004;
        }
        result.diagnosticsInfo_ = diagnosticsInfo_;
        if (((from_bitField0_ & 0x00000008) != 0)) {
          to_bitField0_ |= 0x00000008;
        }
        result.finalApplicationStatus_ = finalApplicationStatus_;
        if (((from_bitField0_ & 0x00000010) != 0)) {
          to_bitField0_ |= 0x00000010;
        }
        result.yarnApplicationState_ = yarnApplicationState_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // --- Reflective mutation methods: plain delegations to the superclass,
      // overridden here only to narrow the return type to Builder. ---
      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      /** Dispatches to the typed mergeFrom when possible, else generic merge. */
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /**
       * Merges each field of {@code other} that is present, overwriting scalars
       * and recursively merging the application_id sub-message; unknown fields
       * are merged as well.  Merging the default instance is a no-op.
       */
      public Builder mergeFrom(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto other) {
        if (other == org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto.getDefaultInstance()) return this;
        if (other.hasApplicationId()) {
          mergeApplicationId(other.getApplicationId());
        }
        if (other.hasFinishTime()) {
          setFinishTime(other.getFinishTime());
        }
        if (other.hasDiagnosticsInfo()) {
          // Copies the raw String/ByteString object to avoid re-encoding.
          bitField0_ |= 0x00000004;
          diagnosticsInfo_ = other.diagnosticsInfo_;
          onChanged();
        }
        if (other.hasFinalApplicationStatus()) {
          setFinalApplicationStatus(other.getFinalApplicationStatus());
        }
        if (other.hasYarnApplicationState()) {
          setYarnApplicationState(other.getYarnApplicationState());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      // Always initialized: the message has no required fields.
      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      /**
       * Parses from a stream and merges the result; on a parse error, merges
       * whatever was successfully read (the unfinished message) before rethrowing.
       */
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Builder-side presence bits, mirroring the message's bitField0_ layout.
      private int bitField0_;

      // Either the plain value (applicationId_) or a lazily-created field
      // builder (applicationIdBuilder_) is active at a time, never both.
      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public boolean hasApplicationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       * Returns the default instance (never null) when unset.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
        if (applicationIdBuilder_ == null) {
          return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        } else {
          return applicationIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       * Replaces the field value; rejects null.
       */
      public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationId_ = value;
          onChanged();
        } else {
          applicationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public Builder setApplicationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (applicationIdBuilder_ == null) {
          applicationId_ = builderForValue.build();
          onChanged();
        } else {
          applicationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       * Merges {@code value} into any existing non-default value, otherwise
       * just adopts it.
       */
      public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
              applicationId_ != null &&
              applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            applicationId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.newBuilder(applicationId_).mergeFrom(value).buildPartial();
          } else {
            applicationId_ = value;
          }
          onChanged();
        } else {
          applicationIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public Builder clearApplicationId() {
        if (applicationIdBuilder_ == null) {
          applicationId_ = null;
          onChanged();
        } else {
          applicationIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       * Marks the field present and hands out the nested builder for in-place edits.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
        if (applicationIdBuilder_ != null) {
          return applicationIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       * Lazily creates the field builder; once created, applicationId_ is
       * nulled out and the builder becomes the single source of truth.
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getApplicationIdFieldBuilder() {
        if (applicationIdBuilder_ == null) {
          applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getApplicationId(),
                  getParentForChildren(),
                  isClean());
          applicationId_ = null;
        }
        return applicationIdBuilder_;
      }

      // Application finish time; presumably epoch milliseconds — TODO confirm against callers.
      private long finishTime_ ;
      /**
       * optional int64 finish_time = 2;
       */
      public boolean hasFinishTime() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional int64 finish_time = 2;
       */
      public long getFinishTime() {
        return finishTime_;
      }
      /**
       * optional int64 finish_time = 2;
       * Sets the value and marks the field present.
       */
      public Builder setFinishTime(long value) {
        bitField0_ |= 0x00000002;
        finishTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 finish_time = 2;
       * Clears the presence bit and resets the value to 0.
       */
      public Builder clearFinishTime() {
        bitField0_ = (bitField0_ & ~0x00000002);
        finishTime_ = 0L;
        onChanged();
        return this;
      }

      // Holds either a String or a ByteString; decoded lazily and cached, same
      // convention as the message class.
      private java.lang.Object diagnosticsInfo_ = "";
      /**
       * optional string diagnostics_info = 3;
       */
      public boolean hasDiagnosticsInfo() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * optional string diagnostics_info = 3;
       * Decodes and caches the UTF-8 bytes on first access.
       */
      public java.lang.String getDiagnosticsInfo() {
        java.lang.Object ref = diagnosticsInfo_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          // Cache the decoded String only when the bytes are valid UTF-8.
          if (bs.isValidUtf8()) {
            diagnosticsInfo_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string diagnostics_info = 3;
       * Returns the field as bytes, caching the UTF-8 encoding of a cached String.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDiagnosticsInfoBytes() {
        java.lang.Object ref = diagnosticsInfo_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          diagnosticsInfo_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string diagnostics_info = 3;
       * Sets the value and marks the field present; rejects null.
       */
      public Builder setDiagnosticsInfo(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        diagnosticsInfo_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string diagnostics_info = 3;
       * Clears the presence bit and restores the default-instance value.
       */
      public Builder clearDiagnosticsInfo() {
        bitField0_ = (bitField0_ & ~0x00000004);
        diagnosticsInfo_ = getDefaultInstance().getDiagnosticsInfo();
        onChanged();
        return this;
      }
      /**
       * optional string diagnostics_info = 3;
       * Sets the raw bytes without UTF-8 validation; rejects null.
       */
      public Builder setDiagnosticsInfoBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        diagnosticsInfo_ = value;
        onChanged();
        return this;
      }

      // Stored as the enum's numeric wire value; 0 == APP_UNDEFINED.
      private int finalApplicationStatus_ = 0;
      /**
       * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 4;
       *
       * @return whether final_application_status has been set on this builder
       */
      public boolean hasFinalApplicationStatus() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 4;
       *
       * @return the enum value; APP_UNDEFINED when the stored number matches
       *         no known constant
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus() {
        // forNumber(int) is the supported replacement for the deprecated
        // valueOf(int), making the @SuppressWarnings("deprecation") unnecessary.
        org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.forNumber(finalApplicationStatus_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.APP_UNDEFINED : result;
      }
      /**
       * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 4;
       *
       * @param value the status to record; must not be null
       * @return this builder, for chaining
       * @throws NullPointerException if value is null
       */
      public Builder setFinalApplicationStatus(org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        finalApplicationStatus_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 4;
       *
       * @return this builder, for chaining
       */
      public Builder clearFinalApplicationStatus() {
        bitField0_ = (bitField0_ & ~0x00000008);
        finalApplicationStatus_ = 0;
        onChanged();
        return this;
      }

      // Stored as the enum's numeric wire value; 1 == NEW (this enum has no 0).
      private int yarnApplicationState_ = 1;
      /**
       * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 5;
       *
       * @return whether yarn_application_state has been set on this builder
       */
      public boolean hasYarnApplicationState() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 5;
       *
       * @return the enum value; NEW when the stored number matches no known
       *         constant
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getYarnApplicationState() {
        // forNumber(int) is the supported replacement for the deprecated
        // valueOf(int), making the @SuppressWarnings("deprecation") unnecessary.
        org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.forNumber(yarnApplicationState_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.NEW : result;
      }
      /**
       * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 5;
       *
       * @param value the state to record; must not be null
       * @return this builder, for chaining
       * @throws NullPointerException if value is null
       */
      public Builder setYarnApplicationState(org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000010;
        yarnApplicationState_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 5;
       *
       * @return this builder, for chaining
       */
      public Builder clearYarnApplicationState() {
        bitField0_ = (bitField0_ & ~0x00000010);
        yarnApplicationState_ = 1;
        onChanged();
        return this;
      }
      // Both overrides simply delegate to the generated superclass so the
      // builder's covariant return type (Builder) is preserved for chaining.
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationFinishDataProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationFinishDataProto)
    // Shared immutable default instance for ApplicationFinishDataProto,
    // created eagerly in the static initializer.
    private static final org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto();
    }

    /** @return the singleton default (all-fields-unset) instance */
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    /**
     * Deprecated in generated code; prefer {@link #parser()}. Restores the
     * {@code Parser<ApplicationFinishDataProto>} type parameters that were
     * missing (raw types) in this copy of the file.
     */
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationFinishDataProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationFinishDataProto>() {
      @java.lang.Override
      public ApplicationFinishDataProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        // Parsing is implemented by the message's stream constructor.
        return new ApplicationFinishDataProto(input, extensionRegistry);
      }
    };

    /** @return the parser singleton for this message type */
    public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationFinishDataProto> parser() {
      return PARSER;
    }

    /**
     * {@inheritDoc} Uses the covariant generic return type
     * {@code Parser<ApplicationFinishDataProto>} (the type parameter was
     * missing in this copy of the file).
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationFinishDataProto> getParserForType() {
      return PARSER;
    }

    /** @return the shared default instance for this message type */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  /**
   * Accessor contract for {@code hadoop.yarn.ApplicationAttemptHistoryDataProto},
   * implemented by both the message class and its builder. Each optional field
   * exposes a {@code hasX()} presence check and a {@code getX()} accessor;
   * string fields additionally expose a raw UTF-8 {@code getXBytes()} view.
   */
  public interface ApplicationAttemptHistoryDataProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationAttemptHistoryDataProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional .hadoop.yarn.ApplicationIdProto application_attempt_id = 1;
     *
     * @return whether the application_attempt_id field is set
     */
    boolean hasApplicationAttemptId();
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId();
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder();

    /**
     * optional string host = 2;
     */
    boolean hasHost();
    /**
     * optional string host = 2;
     */
    java.lang.String getHost();
    /**
     * optional string host = 2;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostBytes();

    /**
     * optional int32 rpc_port = 3;
     */
    boolean hasRpcPort();
    /**
     * optional int32 rpc_port = 3;
     */
    int getRpcPort();

    /**
     * optional string tracking_url = 4;
     */
    boolean hasTrackingUrl();
    /**
     * optional string tracking_url = 4;
     */
    java.lang.String getTrackingUrl();
    /**
     * optional string tracking_url = 4;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTrackingUrlBytes();

    /**
     * optional string diagnostics_info = 5;
     */
    boolean hasDiagnosticsInfo();
    /**
     * optional string diagnostics_info = 5;
     */
    java.lang.String getDiagnosticsInfo();
    /**
     * optional string diagnostics_info = 5;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsInfoBytes();

    /**
     * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 6;
     */
    boolean hasFinalApplicationStatus();
    /**
     * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 6;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus();

    /**
     * optional .hadoop.yarn.ContainerIdProto master_container_id = 7;
     */
    boolean hasMasterContainerId();
    /**
     * optional .hadoop.yarn.ContainerIdProto master_container_id = 7;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getMasterContainerId();
    /**
     * optional .hadoop.yarn.ContainerIdProto master_container_id = 7;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getMasterContainerIdOrBuilder();

    /**
     * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 8;
     */
    boolean hasYarnApplicationAttemptState();
    /**
     * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 8;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto getYarnApplicationAttemptState();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ApplicationAttemptHistoryDataProto}
   */
  public  static final class ApplicationAttemptHistoryDataProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationAttemptHistoryDataProto)
      ApplicationAttemptHistoryDataProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use ApplicationAttemptHistoryDataProto.newBuilder() to construct.
    // Restores the Builder<?> wildcard that was missing (raw type) in this
    // copy of the file.
    private ApplicationAttemptHistoryDataProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    /** Initializes string fields to "" and enum fields to their default wire numbers. */
    private ApplicationAttemptHistoryDataProto() {
      host_ = "";
      trackingUrl_ = "";
      diagnosticsInfo_ = "";
      finalApplicationStatus_ = 0;
      yarnApplicationAttemptState_ = 1;
    }

    /** @return fields that were on the wire but are not in this message's schema */
    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parses a serialized message from the wire. Each case label is the full
     * protobuf tag: (field_number << 3) | wire_type, so e.g. 10 = field 1
     * length-delimited, 24 = field 3 varint. Unrecognized fields (and enum
     * numbers with no matching constant) are preserved in unknownFields.
     */
    private ApplicationAttemptHistoryDataProto(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      // Declared by the generator; unused for this message (no repeated fields).
      int mutable_bitField0_ = 0;
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of stream / enclosing group.
              done = true;
              break;
            case 10: {
              // Field 1: application_attempt_id (message). If the field was
              // already seen, merge the two occurrences per proto2 semantics.
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) != 0)) {
                subBuilder = applicationAttemptId_.toBuilder();
              }
              applicationAttemptId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(applicationAttemptId_);
                applicationAttemptId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // Field 2: host (string) — kept as raw bytes; decoded lazily.
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000002;
              host_ = bs;
              break;
            }
            case 24: {
              // Field 3: rpc_port (int32).
              bitField0_ |= 0x00000004;
              rpcPort_ = input.readInt32();
              break;
            }
            case 34: {
              // Field 4: tracking_url (string).
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000008;
              trackingUrl_ = bs;
              break;
            }
            case 42: {
              // Field 5: diagnostics_info (string).
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000010;
              diagnosticsInfo_ = bs;
              break;
            }
            case 48: {
              // Field 6: final_application_status (enum). Unknown numbers go
              // to unknownFields instead of being stored.
              int rawValue = input.readEnum();
                @SuppressWarnings("deprecation")
              org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto value = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(6, rawValue);
              } else {
                bitField0_ |= 0x00000020;
                finalApplicationStatus_ = rawValue;
              }
              break;
            }
            case 58: {
              // Field 7: master_container_id (message), merged like field 1.
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000040) != 0)) {
                subBuilder = masterContainerId_.toBuilder();
              }
              masterContainerId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(masterContainerId_);
                masterContainerId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000040;
              break;
            }
            case 64: {
              // Field 8: yarn_application_attempt_state (enum).
              int rawValue = input.readEnum();
                @SuppressWarnings("deprecation")
              org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto value = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(8, rawValue);
              } else {
                bitField0_ |= 0x00000080;
                yarnApplicationAttemptState_ = rawValue;
              }
              break;
            }
            default: {
              // Unknown tag: preserve it; a false return means end of input.
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        // Always attach whatever unknown fields were collected, even on error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** @return the protobuf descriptor for this message type */
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationAttemptHistoryDataProto_descriptor;
    }

    /** Wires up reflective field access for the runtime's generated-message support. */
    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationAttemptHistoryDataProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto.Builder.class);
    }

    // One presence bit per optional field, in field-number order (bit 0 = field 1).
    private int bitField0_;
    public static final int APPLICATION_ATTEMPT_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_;
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     *
     * @return whether the application_attempt_id field is set
     */
    public boolean hasApplicationAttemptId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     *
     * @return the field value, or the default instance when unset (never null)
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() {
      return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
    }
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() {
      return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
    }

    public static final int HOST_FIELD_NUMBER = 2;
    // String or ByteString; volatile because the lazily-decoded value may be
    // cached by one thread and read by another.
    private volatile java.lang.Object host_;
    /**
     * optional string host = 2;
     *
     * @return whether the host field is set
     */
    public boolean hasHost() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * optional string host = 2;
     *
     * @return the field value, decoded from UTF-8 bytes on first access
     */
    public java.lang.String getHost() {
      java.lang.Object ref = host_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache only valid UTF-8 so invalid bytes re-serialize unchanged.
        if (bs.isValidUtf8()) {
          host_ = s;
        }
        return s;
      }
    }
    /**
     * optional string host = 2;
     *
     * @return the field value as UTF-8 bytes (caching the encoding)
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostBytes() {
      java.lang.Object ref = host_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        host_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int RPC_PORT_FIELD_NUMBER = 3;
    private int rpcPort_;
    /**
     * optional int32 rpc_port = 3;
     *
     * @return whether the rpc_port field is set
     */
    public boolean hasRpcPort() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * optional int32 rpc_port = 3;
     *
     * @return the field value (0 when unset)
     */
    public int getRpcPort() {
      return rpcPort_;
    }

    public static final int TRACKING_URL_FIELD_NUMBER = 4;
    // String or ByteString; see host_ for the lazy-decode pattern.
    private volatile java.lang.Object trackingUrl_;
    /**
     * optional string tracking_url = 4;
     *
     * @return whether the tracking_url field is set
     */
    public boolean hasTrackingUrl() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * optional string tracking_url = 4;
     *
     * @return the field value, decoded from UTF-8 bytes on first access
     */
    public java.lang.String getTrackingUrl() {
      java.lang.Object ref = trackingUrl_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache only valid UTF-8 so invalid bytes re-serialize unchanged.
        if (bs.isValidUtf8()) {
          trackingUrl_ = s;
        }
        return s;
      }
    }
    /**
     * optional string tracking_url = 4;
     *
     * @return the field value as UTF-8 bytes (caching the encoding)
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getTrackingUrlBytes() {
      java.lang.Object ref = trackingUrl_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        trackingUrl_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int DIAGNOSTICS_INFO_FIELD_NUMBER = 5;
    // String or ByteString; see host_ for the lazy-decode pattern.
    private volatile java.lang.Object diagnosticsInfo_;
    /**
     * optional string diagnostics_info = 5;
     *
     * @return whether the diagnostics_info field is set
     */
    public boolean hasDiagnosticsInfo() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * optional string diagnostics_info = 5;
     *
     * @return the field value, decoded from UTF-8 bytes on first access
     */
    public java.lang.String getDiagnosticsInfo() {
      java.lang.Object ref = diagnosticsInfo_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache only valid UTF-8 so invalid bytes re-serialize unchanged.
        if (bs.isValidUtf8()) {
          diagnosticsInfo_ = s;
        }
        return s;
      }
    }
    /**
     * optional string diagnostics_info = 5;
     *
     * @return the field value as UTF-8 bytes (caching the encoding)
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsInfoBytes() {
      java.lang.Object ref = diagnosticsInfo_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        diagnosticsInfo_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int FINAL_APPLICATION_STATUS_FIELD_NUMBER = 6;
    // Stored as the enum's numeric wire value.
    private int finalApplicationStatus_;
    /**
     * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 6;
     *
     * @return whether the final_application_status field is set
     */
    public boolean hasFinalApplicationStatus() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 6;
     *
     * @return the enum value; APP_UNDEFINED when the stored number matches no
     *         known constant
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus() {
      // forNumber(int) is the supported replacement for the deprecated
      // valueOf(int), making the @SuppressWarnings("deprecation") unnecessary.
      org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.forNumber(finalApplicationStatus_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.APP_UNDEFINED : result;
    }

    public static final int MASTER_CONTAINER_ID_FIELD_NUMBER = 7;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto masterContainerId_;
    /**
     * optional .hadoop.yarn.ContainerIdProto master_container_id = 7;
     *
     * @return whether the master_container_id field is set
     */
    public boolean hasMasterContainerId() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * optional .hadoop.yarn.ContainerIdProto master_container_id = 7;
     *
     * @return the field value, or the default instance when unset (never null)
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getMasterContainerId() {
      return masterContainerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : masterContainerId_;
    }
    /**
     * optional .hadoop.yarn.ContainerIdProto master_container_id = 7;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getMasterContainerIdOrBuilder() {
      return masterContainerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : masterContainerId_;
    }

    public static final int YARN_APPLICATION_ATTEMPT_STATE_FIELD_NUMBER = 8;
    // Stored as the enum's numeric wire value.
    private int yarnApplicationAttemptState_;
    /**
     * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 8;
     *
     * @return whether the yarn_application_attempt_state field is set
     */
    public boolean hasYarnApplicationAttemptState() {
      return ((bitField0_ & 0x00000080) != 0);
    }
    /**
     * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 8;
     *
     * @return the enum value; APP_ATTEMPT_NEW when the stored number matches
     *         no known constant
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto getYarnApplicationAttemptState() {
      // forNumber(int) is the supported replacement for the deprecated
      // valueOf(int), making the @SuppressWarnings("deprecation") unnecessary.
      org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.forNumber(yarnApplicationAttemptState_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.APP_ATTEMPT_NEW : result;
    }

    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    /** @return always true once computed — this message has no required fields. */
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    /**
     * Serializes set fields to the wire in field-number order, guarded by the
     * corresponding bitField0_ presence bits, then appends unknown fields.
     */
    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationAttemptId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, host_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt32(3, rpcPort_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, trackingUrl_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, diagnosticsInfo_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeEnum(6, finalApplicationStatus_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeMessage(7, getMasterContainerId());
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        output.writeEnum(8, yarnApplicationAttemptState_);
      }
      unknownFields.writeTo(output);
    }

    /**
     * Computes (and memoizes in memoizedSize) the serialized byte size,
     * summing each set field's size plus the unknown fields.
     */
    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      // -1 is the "not yet computed" sentinel.
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationAttemptId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, host_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(3, rpcPort_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, trackingUrl_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, diagnosticsInfo_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(6, finalApplicationStatus_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(7, getMasterContainerId());
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(8, yarnApplicationAttemptState_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    /**
     * Field-wise equality: two messages are equal when each field has the
     * same presence and, if present, the same value, and their unknown
     * fields match. Enum fields compare raw numeric values directly.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto other = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto) obj;

      if (hasApplicationAttemptId() != other.hasApplicationAttemptId()) return false;
      if (hasApplicationAttemptId()) {
        if (!getApplicationAttemptId()
            .equals(other.getApplicationAttemptId())) return false;
      }
      if (hasHost() != other.hasHost()) return false;
      if (hasHost()) {
        if (!getHost()
            .equals(other.getHost())) return false;
      }
      if (hasRpcPort() != other.hasRpcPort()) return false;
      if (hasRpcPort()) {
        if (getRpcPort()
            != other.getRpcPort()) return false;
      }
      if (hasTrackingUrl() != other.hasTrackingUrl()) return false;
      if (hasTrackingUrl()) {
        if (!getTrackingUrl()
            .equals(other.getTrackingUrl())) return false;
      }
      if (hasDiagnosticsInfo() != other.hasDiagnosticsInfo()) return false;
      if (hasDiagnosticsInfo()) {
        if (!getDiagnosticsInfo()
            .equals(other.getDiagnosticsInfo())) return false;
      }
      if (hasFinalApplicationStatus() != other.hasFinalApplicationStatus()) return false;
      if (hasFinalApplicationStatus()) {
        if (finalApplicationStatus_ != other.finalApplicationStatus_) return false;
      }
      if (hasMasterContainerId() != other.hasMasterContainerId()) return false;
      if (hasMasterContainerId()) {
        if (!getMasterContainerId()
            .equals(other.getMasterContainerId())) return false;
      }
      if (hasYarnApplicationAttemptState() != other.hasYarnApplicationAttemptState()) return false;
      if (hasYarnApplicationAttemptState()) {
        if (yarnApplicationAttemptState_ != other.yarnApplicationAttemptState_) return false;
      }
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }

    /**
     * Memoized hash consistent with equals(): mixes in each present field's
     * number and value, plus the descriptor and unknown fields.
     */
    @java.lang.Override
    public int hashCode() {
      // 0 doubles as the "not yet computed" sentinel.
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationAttemptId()) {
        hash = (37 * hash) + APPLICATION_ATTEMPT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationAttemptId().hashCode();
      }
      if (hasHost()) {
        hash = (37 * hash) + HOST_FIELD_NUMBER;
        hash = (53 * hash) + getHost().hashCode();
      }
      if (hasRpcPort()) {
        hash = (37 * hash) + RPC_PORT_FIELD_NUMBER;
        hash = (53 * hash) + getRpcPort();
      }
      if (hasTrackingUrl()) {
        hash = (37 * hash) + TRACKING_URL_FIELD_NUMBER;
        hash = (53 * hash) + getTrackingUrl().hashCode();
      }
      if (hasDiagnosticsInfo()) {
        hash = (37 * hash) + DIAGNOSTICS_INFO_FIELD_NUMBER;
        hash = (53 * hash) + getDiagnosticsInfo().hashCode();
      }
      if (hasFinalApplicationStatus()) {
        hash = (37 * hash) + FINAL_APPLICATION_STATUS_FIELD_NUMBER;
        hash = (53 * hash) + finalApplicationStatus_;
      }
      if (hasMasterContainerId()) {
        hash = (37 * hash) + MASTER_CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getMasterContainerId().hashCode();
      }
      if (hasYarnApplicationAttemptState()) {
        hash = (37 * hash) + YARN_APPLICATION_ATTEMPT_STATE_FIELD_NUMBER;
        hash = (53 * hash) + yarnApplicationAttemptState_;
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parseFrom overloads: all delegate to the shared PARSER (or the
    // GeneratedMessageV3 stream helper) and throw
    // InvalidProtocolBufferException on malformed input.
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    // Builder factory methods.
    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a fresh builder initialized from the (immutable) default instance.
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    // Returns a builder pre-populated with {@code prototype}'s set fields.
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      // Short-circuit for the default instance: nothing to merge, so skip the
      // per-field copy that mergeFrom(this) would perform.
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    // Internal hook used by parent builders to receive onChanged() callbacks
    // from this nested builder.
    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ApplicationAttemptHistoryDataProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationAttemptHistoryDataProto)
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProtoOrBuilder {
      // Descriptor for hadoop.yarn.ApplicationAttemptHistoryDataProto, used by
      // the protobuf reflection APIs.
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationAttemptHistoryDataProto_descriptor;
      }

      // Maps descriptor fields to the generated Java accessors; required by
      // GeneratedMessageV3's reflective get/set support.
      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationAttemptHistoryDataProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Constructor used when this builder is nested inside a parent builder;
      // the parent receives change notifications via onChanged().
      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested-message field builders when the runtime flag
      // alwaysUseFieldBuilders is set (normally only in tests); otherwise
      // builders are created lazily on first access.
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationAttemptIdFieldBuilder();
          getMasterContainerIdFieldBuilder();
        }
      }
      // Resets every field to its proto default and clears all presence bits
      // in bitField0_. Nested message fields are cleared through their field
      // builder when one exists, otherwise by nulling the raw reference.
      @java.lang.Override
      public Builder clear() {
        super.clear();
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptId_ = null;
        } else {
          applicationAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        host_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        rpcPort_ = 0;
        bitField0_ = (bitField0_ & ~0x00000004);
        trackingUrl_ = "";
        bitField0_ = (bitField0_ & ~0x00000008);
        diagnosticsInfo_ = "";
        bitField0_ = (bitField0_ & ~0x00000010);
        finalApplicationStatus_ = 0;
        bitField0_ = (bitField0_ & ~0x00000020);
        if (masterContainerIdBuilder_ == null) {
          masterContainerId_ = null;
        } else {
          masterContainerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000040);
        // Enum default is 1, the numeric value of the first declared
        // YarnApplicationAttemptStateProto constant — presumably the NEW
        // state; confirm against yarn_protos.proto.
        yarnApplicationAttemptState_ = 1;
        bitField0_ = (bitField0_ & ~0x00000080);
        return this;
      }

      // Reflection support: the descriptor of the message this builder builds.
      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationAttemptHistoryDataProto_descriptor;
      }

      // The shared immutable default instance (all fields unset).
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto.getDefaultInstance();
      }

      // Builds the message, throwing UninitializedMessageException if required
      // fields are missing. (isInitialized() is always true here — all fields
      // are optional — so the throw branch is effectively dead.)
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto build() {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies the builder state into a new message without an initialization
      // check. from_bitField0_ holds the builder's presence bits; to_bitField0_
      // is rebuilt bit by bit for the message. Note the generated pattern for
      // scalar/string fields: the value reference is assigned unconditionally
      // (cheap reference copy), while the presence bit is only set when the
      // field was actually set on the builder.
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto buildPartial() {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto result = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          // Nested message: take the raw field when no builder was created,
          // otherwise build the nested builder's current state.
          if (applicationAttemptIdBuilder_ == null) {
            result.applicationAttemptId_ = applicationAttemptId_;
          } else {
            result.applicationAttemptId_ = applicationAttemptIdBuilder_.build();
          }
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          to_bitField0_ |= 0x00000002;
        }
        result.host_ = host_;
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.rpcPort_ = rpcPort_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          to_bitField0_ |= 0x00000008;
        }
        result.trackingUrl_ = trackingUrl_;
        if (((from_bitField0_ & 0x00000010) != 0)) {
          to_bitField0_ |= 0x00000010;
        }
        result.diagnosticsInfo_ = diagnosticsInfo_;
        if (((from_bitField0_ & 0x00000020) != 0)) {
          to_bitField0_ |= 0x00000020;
        }
        result.finalApplicationStatus_ = finalApplicationStatus_;
        if (((from_bitField0_ & 0x00000040) != 0)) {
          if (masterContainerIdBuilder_ == null) {
            result.masterContainerId_ = masterContainerId_;
          } else {
            result.masterContainerId_ = masterContainerIdBuilder_.build();
          }
          to_bitField0_ |= 0x00000040;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          to_bitField0_ |= 0x00000080;
        }
        result.yarnApplicationAttemptState_ = yarnApplicationAttemptState_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Boilerplate delegations to GeneratedMessageV3.Builder, re-declared so
      // each returns the concrete Builder type (covariant returns) for fluent
      // chaining through the reflective field-manipulation API.
      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      // Generic merge: dispatches to the typed overload when possible,
      // otherwise falls back to reflective field-by-field merging.
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Typed merge following proto2 semantics: only fields present in
      // {@code other} overwrite/merge into this builder; nested messages are
      // recursively merged rather than replaced.
      public Builder mergeFrom(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto other) {
        if (other == org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto.getDefaultInstance()) return this;
        if (other.hasApplicationAttemptId()) {
          mergeApplicationAttemptId(other.getApplicationAttemptId());
        }
        if (other.hasHost()) {
          // String fields copy the raw Object reference (String or lazily
          // decoded ByteString) directly instead of going through the setter,
          // avoiding an unnecessary UTF-8 decode.
          bitField0_ |= 0x00000002;
          host_ = other.host_;
          onChanged();
        }
        if (other.hasRpcPort()) {
          setRpcPort(other.getRpcPort());
        }
        if (other.hasTrackingUrl()) {
          bitField0_ |= 0x00000008;
          trackingUrl_ = other.trackingUrl_;
          onChanged();
        }
        if (other.hasDiagnosticsInfo()) {
          bitField0_ |= 0x00000010;
          diagnosticsInfo_ = other.diagnosticsInfo_;
          onChanged();
        }
        if (other.hasFinalApplicationStatus()) {
          setFinalApplicationStatus(other.getFinalApplicationStatus());
        }
        if (other.hasMasterContainerId()) {
          mergeMasterContainerId(other.getMasterContainerId());
        }
        if (other.hasYarnApplicationAttemptState()) {
          setYarnApplicationAttemptState(other.getYarnApplicationAttemptState());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      // All fields of this message are optional, so any instance is valid.
      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      // Parses a message from the wire and merges it into this builder.
      // Per the protobuf merge contract, any fields parsed before a failure
      // are still merged in (the finally block), and the protobuf exception is
      // unwrapped to the underlying IOException for stream callers.
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was successfully parsed before the error.
          parsedMessage = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for the 8 optional fields (bit 0x1 = field 1, etc.).
      private int bitField0_;

      // Field 1: application_attempt_id (nested message). Either the raw
      // message reference or, once a caller asks for a builder, the
      // SingleFieldBuilderV3 — never both at once.
      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> applicationAttemptIdBuilder_;
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       * @return whether the field has been explicitly set
       */
      public boolean hasApplicationAttemptId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       * @return the field value, or the default instance when unset
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() {
        if (applicationAttemptIdBuilder_ == null) {
          return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
        } else {
          return applicationAttemptIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       * Replaces the field value; rejects null.
       */
      public Builder setApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (applicationAttemptIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationAttemptId_ = value;
          onChanged();
        } else {
          applicationAttemptIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       * Convenience setter that builds {@code builderForValue} immediately.
       */
      public Builder setApplicationAttemptId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptId_ = builderForValue.build();
          onChanged();
        } else {
          applicationAttemptIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       * Merges {@code value} into the existing message (proto2 submessage
       * merge); if the field is unset or still the default, {@code value}
       * simply replaces it.
       */
      public Builder mergeApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (applicationAttemptIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
              applicationAttemptId_ != null &&
              applicationAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) {
            applicationAttemptId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.newBuilder(applicationAttemptId_).mergeFrom(value).buildPartial();
          } else {
            applicationAttemptId_ = value;
          }
          onChanged();
        } else {
          applicationAttemptIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       * Clears the field and its presence bit.
       */
      public Builder clearApplicationAttemptId() {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptId_ = null;
          onChanged();
        } else {
          applicationAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       * Returns a mutable nested builder; marks the field as set because the
       * caller may mutate it in place.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getApplicationAttemptIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationAttemptIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       * Read-only view without forcing creation of a field builder.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() {
        if (applicationAttemptIdBuilder_ != null) {
          return applicationAttemptIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationAttemptId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       * Lazily creates the field builder; from then on the builder owns the
       * field state, so the raw reference is nulled out.
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> 
          getApplicationAttemptIdFieldBuilder() {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>(
                  getApplicationAttemptId(),
                  getParentForChildren(),
                  isClean());
          applicationAttemptId_ = null;
        }
        return applicationAttemptIdBuilder_;
      }

      // Field 2: host (optional string). Stored as Object so it can hold
      // either a decoded String or the raw ByteString off the wire (lazy
      // UTF-8 decoding).
      private java.lang.Object host_ = "";
      /**
       * optional string host = 2;
       * @return whether the field has been explicitly set
       */
      public boolean hasHost() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional string host = 2;
       * Decodes and returns the value; caches the String back into host_ only
       * when the bytes are valid UTF-8.
       */
      public java.lang.String getHost() {
        java.lang.Object ref = host_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            host_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string host = 2;
       * Returns the raw bytes, caching the UTF-8 encoding of a cached String.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getHostBytes() {
        java.lang.Object ref = host_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          host_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string host = 2;
       * Sets the value; rejects null.
       */
      public Builder setHost(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        host_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string host = 2;
       * Clears the presence bit and restores the message-level default ("").
       */
      public Builder clearHost() {
        bitField0_ = (bitField0_ & ~0x00000002);
        host_ = getDefaultInstance().getHost();
        onChanged();
        return this;
      }
      /**
       * optional string host = 2;
       * Sets the value from raw bytes (no UTF-8 validation performed here).
       */
      public Builder setHostBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        host_ = value;
        onChanged();
        return this;
      }

      // Field 3: rpc_port (optional int32).
      private int rpcPort_ ;
      /**
       * optional int32 rpc_port = 3;
       * @return whether the field has been explicitly set
       */
      public boolean hasRpcPort() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * optional int32 rpc_port = 3;
       * @return the field value (0 when unset)
       */
      public int getRpcPort() {
        return rpcPort_;
      }
      /**
       * optional int32 rpc_port = 3;
       * Sets the value and the presence bit.
       */
      public Builder setRpcPort(int value) {
        bitField0_ |= 0x00000004;
        rpcPort_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 rpc_port = 3;
       * Clears the presence bit and resets the value to 0.
       */
      public Builder clearRpcPort() {
        bitField0_ = (bitField0_ & ~0x00000004);
        rpcPort_ = 0;
        onChanged();
        return this;
      }

      // Field 4: tracking_url (optional string). Same lazy String/ByteString
      // representation as host_.
      private java.lang.Object trackingUrl_ = "";
      /**
       * optional string tracking_url = 4;
       * @return whether the field has been explicitly set
       */
      public boolean hasTrackingUrl() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * optional string tracking_url = 4;
       * Decodes and returns the value; caches the String when valid UTF-8.
       */
      public java.lang.String getTrackingUrl() {
        java.lang.Object ref = trackingUrl_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            trackingUrl_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string tracking_url = 4;
       * Returns the raw bytes, caching the UTF-8 encoding of a cached String.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getTrackingUrlBytes() {
        java.lang.Object ref = trackingUrl_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          trackingUrl_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string tracking_url = 4;
       * Sets the value; rejects null.
       */
      public Builder setTrackingUrl(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        trackingUrl_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string tracking_url = 4;
       * Clears the presence bit and restores the message-level default ("").
       */
      public Builder clearTrackingUrl() {
        bitField0_ = (bitField0_ & ~0x00000008);
        trackingUrl_ = getDefaultInstance().getTrackingUrl();
        onChanged();
        return this;
      }
      /**
       * optional string tracking_url = 4;
       * Sets the value from raw bytes (no UTF-8 validation performed here).
       */
      public Builder setTrackingUrlBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        trackingUrl_ = value;
        onChanged();
        return this;
      }

      // Field 5: diagnostics_info (optional string). Same lazy
      // String/ByteString representation as host_.
      private java.lang.Object diagnosticsInfo_ = "";
      /**
       * optional string diagnostics_info = 5;
       * @return whether the field has been explicitly set
       */
      public boolean hasDiagnosticsInfo() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * optional string diagnostics_info = 5;
       * Decodes and returns the value; caches the String when valid UTF-8.
       */
      public java.lang.String getDiagnosticsInfo() {
        java.lang.Object ref = diagnosticsInfo_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            diagnosticsInfo_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string diagnostics_info = 5;
       * Returns the raw bytes, caching the UTF-8 encoding of a cached String.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDiagnosticsInfoBytes() {
        java.lang.Object ref = diagnosticsInfo_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          diagnosticsInfo_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string diagnostics_info = 5;
       * Sets the value; rejects null.
       */
      public Builder setDiagnosticsInfo(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
        diagnosticsInfo_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string diagnostics_info = 5;
       * Clears the presence bit and restores the message-level default ("").
       */
      public Builder clearDiagnosticsInfo() {
        bitField0_ = (bitField0_ & ~0x00000010);
        diagnosticsInfo_ = getDefaultInstance().getDiagnosticsInfo();
        onChanged();
        return this;
      }
      /**
       * optional string diagnostics_info = 5;
       * Sets the value from raw bytes (no UTF-8 validation performed here).
       */
      public Builder setDiagnosticsInfoBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
        diagnosticsInfo_ = value;
        onChanged();
        return this;
      }

      // Field 6: final_application_status (optional enum), stored as its
      // numeric wire value.
      private int finalApplicationStatus_ = 0;
      /**
       * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 6;
       * @return whether the field has been explicitly set
       */
      public boolean hasFinalApplicationStatus() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 6;
       * Maps the stored number back to the enum constant; an unrecognized
       * number falls back to APP_UNDEFINED.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus() {
        @SuppressWarnings("deprecation")
        org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.valueOf(finalApplicationStatus_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.APP_UNDEFINED : result;
      }
      /**
       * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 6;
       * Sets the value; rejects null.
       */
      public Builder setFinalApplicationStatus(org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000020;
        finalApplicationStatus_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 6;
       * Clears the presence bit and resets to the enum's 0 value.
       */
      public Builder clearFinalApplicationStatus() {
        bitField0_ = (bitField0_ & ~0x00000020);
        finalApplicationStatus_ = 0;
        onChanged();
        return this;
      }

      // Field 7: master_container_id (nested message). Same raw-reference /
      // lazy-field-builder arrangement as applicationAttemptId_.
      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto masterContainerId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> masterContainerIdBuilder_;
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 7;
       * @return whether the field has been explicitly set
       */
      public boolean hasMasterContainerId() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 7;
       * @return the field value, or the default instance when unset
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getMasterContainerId() {
        if (masterContainerIdBuilder_ == null) {
          return masterContainerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : masterContainerId_;
        } else {
          return masterContainerIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 7;
       * Replaces the field value; rejects null.
       */
      public Builder setMasterContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (masterContainerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          masterContainerId_ = value;
          onChanged();
        } else {
          masterContainerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 7;
       * Convenience setter that builds {@code builderForValue} immediately.
       */
      public Builder setMasterContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (masterContainerIdBuilder_ == null) {
          masterContainerId_ = builderForValue.build();
          onChanged();
        } else {
          masterContainerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 7;
       * Merges {@code value} into the existing message (proto2 submessage
       * merge); if the field is unset or still the default, {@code value}
       * simply replaces it.
       */
      public Builder mergeMasterContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (masterContainerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000040) != 0) &&
              masterContainerId_ != null &&
              masterContainerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            masterContainerId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.newBuilder(masterContainerId_).mergeFrom(value).buildPartial();
          } else {
            masterContainerId_ = value;
          }
          onChanged();
        } else {
          masterContainerIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 7;
       * Clears the field and its presence bit.
       */
      public Builder clearMasterContainerId() {
        if (masterContainerIdBuilder_ == null) {
          masterContainerId_ = null;
          onChanged();
        } else {
          masterContainerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000040);
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 7;
       * Returns a mutable nested builder; marks the field as set because the
       * caller may mutate it in place.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getMasterContainerIdBuilder() {
        bitField0_ |= 0x00000040;
        onChanged();
        return getMasterContainerIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 7;
       * Read-only view without forcing creation of a field builder.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getMasterContainerIdOrBuilder() {
        if (masterContainerIdBuilder_ != null) {
          return masterContainerIdBuilder_.getMessageOrBuilder();
        } else {
          return masterContainerId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : masterContainerId_;
        }
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 7;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getMasterContainerIdFieldBuilder() {
        if (masterContainerIdBuilder_ == null) {
          masterContainerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getMasterContainerId(),
                  getParentForChildren(),
                  isClean());
          masterContainerId_ = null;
        }
        return masterContainerIdBuilder_;
      }

      // yarn_application_attempt_state (field 8), stored as the enum's numeric wire value.
      // Default 1 == YarnApplicationAttemptStateProto.APP_ATTEMPT_NEW.
      private int yarnApplicationAttemptState_ = 1;
      /**
       * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 8;
       */
      public boolean hasYarnApplicationAttemptState() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 8;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto getYarnApplicationAttemptState() {
        // valueOf(int) is deprecated but intentional here: it maps the stored number back
        // to the enum, falling back to APP_ATTEMPT_NEW for unrecognized values.
        @SuppressWarnings("deprecation")
        org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.valueOf(yarnApplicationAttemptState_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.APP_ATTEMPT_NEW : result;
      }
      /**
       * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 8;
       */
      public Builder setYarnApplicationAttemptState(org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000080;
        yarnApplicationAttemptState_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 8;
       */
      public Builder clearYarnApplicationAttemptState() {
        bitField0_ = (bitField0_ & ~0x00000080);
        yarnApplicationAttemptState_ = 1;
        onChanged();
        return this;
      }
      // Unknown-field handling is delegated unchanged to the generated superclass.
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationAttemptHistoryDataProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationAttemptHistoryDataProto)
    // Singleton default instance; also serves as the prototype for newBuilder().
    private static final org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto();
    }

    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    // FIX: restored the <ApplicationAttemptHistoryDataProto> type arguments that had been
    // stripped from Parser/AbstractParser (raw types caused unchecked-conversion warnings
    // and weakened the return types of parser()/getParserForType()). This matches the
    // protoc-generated original; PARSER stays @Deprecated in favor of parser().
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationAttemptHistoryDataProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationAttemptHistoryDataProto>() {
      @java.lang.Override
      public ApplicationAttemptHistoryDataProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        // Delegates to the wire-format parsing constructor.
        return new ApplicationAttemptHistoryDataProto(input, extensionRegistry);
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationAttemptHistoryDataProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationAttemptHistoryDataProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptHistoryDataProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  /**
   * Read-only accessor interface for {@code hadoop.yarn.ApplicationAttemptStartDataProto},
   * implemented by both the immutable message and its Builder. Exposes presence checks
   * (has*) and getters for application_attempt_id, host, rpc_port and master_container_id.
   */
  public interface ApplicationAttemptStartDataProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationAttemptStartDataProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    boolean hasApplicationAttemptId();
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId();
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder();

    /**
     * optional string host = 2;
     */
    boolean hasHost();
    /**
     * optional string host = 2;
     */
    java.lang.String getHost();
    /**
     * optional string host = 2;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostBytes();

    /**
     * optional int32 rpc_port = 3;
     */
    boolean hasRpcPort();
    /**
     * optional int32 rpc_port = 3;
     */
    int getRpcPort();

    /**
     * optional .hadoop.yarn.ContainerIdProto master_container_id = 4;
     */
    boolean hasMasterContainerId();
    /**
     * optional .hadoop.yarn.ContainerIdProto master_container_id = 4;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getMasterContainerId();
    /**
     * optional .hadoop.yarn.ContainerIdProto master_container_id = 4;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getMasterContainerIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ApplicationAttemptStartDataProto}
   */
  public  static final class ApplicationAttemptStartDataProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationAttemptStartDataProto)
      ApplicationAttemptStartDataProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use ApplicationAttemptStartDataProto.newBuilder() to construct.
    // FIX: restored the wildcard type argument on GeneratedMessageV3.Builder, which had
    // been stripped (raw type) in this copy of the generated source.
    private ApplicationAttemptStartDataProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    // No-arg constructor used by the parsing path; pre-populates string fields so the
    // lazy String/ByteString accessors never observe null.
    private ApplicationAttemptStartDataProto() {
      host_ = "";
    }

    // Exposes fields that were present on the wire but unknown to this schema version.
    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until end of stream,
    // preserving unrecognized fields in unknownFields.
    private ApplicationAttemptStartDataProto(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 signals end of input.
              done = true;
              break;
            case 10: {
              // Field 1 (application_attempt_id), wire type 2: if the field was already
              // seen, merge the new message into it per proto2 repeated-occurrence rules.
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) != 0)) {
                subBuilder = applicationAttemptId_.toBuilder();
              }
              applicationAttemptId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(applicationAttemptId_);
                applicationAttemptId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // Field 2 (host), wire type 2: stored as raw ByteString, decoded lazily
              // by getHost().
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000002;
              host_ = bs;
              break;
            }
            case 24: {
              // Field 3 (rpc_port), wire type 0 (varint).
              bitField0_ |= 0x00000004;
              rpcPort_ = input.readInt32();
              break;
            }
            case 34: {
              // Field 4 (master_container_id), wire type 2: same merge handling as field 1.
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000008) != 0)) {
                subBuilder = masterContainerId_.toBuilder();
              }
              masterContainerId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(masterContainerId_);
                masterContainerId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000008;
              break;
            }
            default: {
              // Unknown tag: preserve it in unknownFields, or stop on a malformed tag.
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was read, even on failure, so the partially-parsed
        // message attached to the exception is immutable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor / reflection plumbing linking this class to its .proto message type.
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationAttemptStartDataProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationAttemptStartDataProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto.Builder.class);
    }

    // Presence bits for the four optional fields (0x1, 0x2, 0x4, 0x8 in field order).
    private int bitField0_;
    public static final int APPLICATION_ATTEMPT_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_;
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    public boolean hasApplicationAttemptId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() {
      // Never returns null: substitutes the default instance when the field is unset.
      return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
    }
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() {
      return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
    }

    public static final int HOST_FIELD_NUMBER = 2;
    // Holds either a String or a ByteString: parsing stores the raw ByteString and
    // getHost() lazily decodes (and caches) the String form. volatile makes the cached
    // swap safe to publish across threads.
    private volatile java.lang.Object host_;
    /**
     * optional string host = 2;
     */
    public boolean hasHost() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * optional string host = 2;
     */
    public java.lang.String getHost() {
      java.lang.Object ref = host_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded form only when the bytes are valid UTF-8 (round-trippable).
        if (bs.isValidUtf8()) {
          host_ = s;
        }
        return s;
      }
    }
    /**
     * optional string host = 2;
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostBytes() {
      java.lang.Object ref = host_;
      if (ref instanceof java.lang.String) {
        // Symmetric caching: replace the String with its encoded ByteString form.
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        host_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int RPC_PORT_FIELD_NUMBER = 3;
    private int rpcPort_;
    /**
     * optional int32 rpc_port = 3;
     */
    public boolean hasRpcPort() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * optional int32 rpc_port = 3;
     */
    public int getRpcPort() {
      return rpcPort_;
    }

    public static final int MASTER_CONTAINER_ID_FIELD_NUMBER = 4;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto masterContainerId_;
    /**
     * optional .hadoop.yarn.ContainerIdProto master_container_id = 4;
     */
    public boolean hasMasterContainerId() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * optional .hadoop.yarn.ContainerIdProto master_container_id = 4;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getMasterContainerId() {
      return masterContainerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : masterContainerId_;
    }
    /**
     * optional .hadoop.yarn.ContainerIdProto master_container_id = 4;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getMasterContainerIdOrBuilder() {
      return masterContainerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : masterContainerId_;
    }

    // Tri-state cache: -1 = not yet computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      final byte cached = memoizedIsInitialized;
      if (cached != -1) {
        return cached == 1;
      }
      // This message declares no required fields, so it is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    // Serializes only the fields whose presence bit is set; field numbers and wire
    // types match the .proto definition (1,4 = message; 2 = string; 3 = int32).
    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationAttemptId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, host_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt32(3, rpcPort_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeMessage(4, getMasterContainerId());
      }
      unknownFields.writeTo(output);
    }

    // Computes (and memoizes in memoizedSize) the serialized byte size; must mirror
    // writeTo() exactly, including the unknown-fields contribution.
    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationAttemptId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, host_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(3, rpcPort_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(4, getMasterContainerId());
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    // Value equality: messages are equal iff every field has the same presence and,
    // when present, the same value, and their unknown fields match.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto other = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto) obj;

      if (hasApplicationAttemptId() != other.hasApplicationAttemptId()) return false;
      if (hasApplicationAttemptId()) {
        if (!getApplicationAttemptId()
            .equals(other.getApplicationAttemptId())) return false;
      }
      if (hasHost() != other.hasHost()) return false;
      if (hasHost()) {
        if (!getHost()
            .equals(other.getHost())) return false;
      }
      if (hasRpcPort() != other.hasRpcPort()) return false;
      if (hasRpcPort()) {
        if (getRpcPort()
            != other.getRpcPort()) return false;
      }
      if (hasMasterContainerId() != other.hasMasterContainerId()) return false;
      if (hasMasterContainerId()) {
        if (!getMasterContainerId()
            .equals(other.getMasterContainerId())) return false;
      }
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }

    // Hash is memoized (0 means "not yet computed") and mixes each present field's
    // number and value, keeping it consistent with equals().
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationAttemptId()) {
        hash = (37 * hash) + APPLICATION_ATTEMPT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationAttemptId().hashCode();
      }
      if (hasHost()) {
        hash = (37 * hash) + HOST_FIELD_NUMBER;
        hash = (53 * hash) + getHost().hashCode();
      }
      if (hasRpcPort()) {
        hash = (37 * hash) + RPC_PORT_FIELD_NUMBER;
        hash = (53 * hash) + getRpcPort();
      }
      if (hasMasterContainerId()) {
        hash = (37 * hash) + MASTER_CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getMasterContainerId().hashCode();
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Standard generated parse entry points: ByteBuffer/ByteString/byte[] overloads
    // delegate to PARSER directly; stream overloads go through GeneratedMessageV3's
    // IO helpers, which translate IOExceptions appropriately.
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      // The default instance yields an empty Builder; any other instance seeds the
      // Builder with its current field values.
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ApplicationAttemptStartDataProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationAttemptStartDataProto)
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProtoOrBuilder {
      // Descriptor / reflection plumbing shared with the enclosing message type.
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationAttemptStartDataProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationAttemptStartDataProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested-field builders only when the protobuf runtime is
      // configured to always use field builders (alwaysUseFieldBuilders).
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationAttemptIdFieldBuilder();
          getMasterContainerIdFieldBuilder();
        }
      }
      // Resets every field to its default value and clears all presence bits
      // (0x1 application_attempt_id, 0x2 host, 0x4 rpc_port, 0x8 master_container_id).
      @java.lang.Override
      public Builder clear() {
        super.clear();
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptId_ = null;
        } else {
          applicationAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        host_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        rpcPort_ = 0;
        bitField0_ = (bitField0_ & ~0x00000004);
        if (masterContainerIdBuilder_ == null) {
          masterContainerId_ = null;
        } else {
          masterContainerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationAttemptStartDataProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto.getDefaultInstance();
      }

      // build() enforces isInitialized(); use buildPartial() to skip that check.
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto build() {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies the builder state into a new message without the initialization check;
      // presence bits are translated from the builder's bitField0_ into the message's.
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto buildPartial() {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto result = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          if (applicationAttemptIdBuilder_ == null) {
            result.applicationAttemptId_ = applicationAttemptId_;
          } else {
            result.applicationAttemptId_ = applicationAttemptIdBuilder_.build();
          }
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          to_bitField0_ |= 0x00000002;
        }
        // host_ is copied unconditionally; its presence bit alone records whether it was set.
        result.host_ = host_;
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.rpcPort_ = rpcPort_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          if (masterContainerIdBuilder_ == null) {
            result.masterContainerId_ = masterContainerId_;
          } else {
            result.masterContainerId_ = masterContainerIdBuilder_.build();
          }
          to_bitField0_ |= 0x00000008;
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Pure delegations to the generated superclass, retained so the overrides are
      // explicit and return the concrete Builder type.
      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      /**
       * Merges an arbitrary Message: dispatches to the type-specific overload
       * when the argument is actually an ApplicationAttemptStartDataProto,
       * otherwise falls back to the reflective superclass merge.
       */
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /**
       * Field-by-field merge from another ApplicationAttemptStartDataProto.
       * Only fields that are present on {@code other} overwrite (or, for the
       * sub-messages, recursively merge into) this builder's state. Merging the
       * default instance is a no-op by design.
       */
      public Builder mergeFrom(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto other) {
        if (other == org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto.getDefaultInstance()) return this;
        if (other.hasApplicationAttemptId()) {
          mergeApplicationAttemptId(other.getApplicationAttemptId());
        }
        if (other.hasHost()) {
          // Copy the raw Object (String or ByteString) directly to avoid an
          // eager UTF-8 decode; set the presence bit by hand.
          bitField0_ |= 0x00000002;
          host_ = other.host_;
          onChanged();
        }
        if (other.hasRpcPort()) {
          setRpcPort(other.getRpcPort());
        }
        if (other.hasMasterContainerId()) {
          mergeMasterContainerId(other.getMasterContainerId());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      // Always initialized: the .proto declares no required fields.
      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      /**
       * Parses a message from the wire and merges it into this builder.
       * On a parse error the partially-parsed message (if any) is still merged
       * in the finally block before the exception propagates, preserving any
       * fields that were read successfully.
       */
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for the builder's four optional fields (bit 0..3).
      private int bitField0_;

      // application_attempt_id = 1: either the plain field below or, once a
      // caller asks for a mutable sub-builder, the SingleFieldBuilderV3 owns
      // the value and applicationAttemptId_ is nulled out.
      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> applicationAttemptIdBuilder_;
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public boolean hasApplicationAttemptId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() {
        if (applicationAttemptIdBuilder_ == null) {
          return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
        } else {
          return applicationAttemptIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public Builder setApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (applicationAttemptIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationAttemptId_ = value;
          onChanged();
        } else {
          applicationAttemptIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public Builder setApplicationAttemptId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptId_ = builderForValue.build();
          onChanged();
        } else {
          applicationAttemptIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public Builder mergeApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (applicationAttemptIdBuilder_ == null) {
          // Merge recursively only when a non-default value is already set;
          // otherwise the incoming message simply replaces the field.
          if (((bitField0_ & 0x00000001) != 0) &&
              applicationAttemptId_ != null &&
              applicationAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) {
            applicationAttemptId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.newBuilder(applicationAttemptId_).mergeFrom(value).buildPartial();
          } else {
            applicationAttemptId_ = value;
          }
          onChanged();
        } else {
          applicationAttemptIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public Builder clearApplicationAttemptId() {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptId_ = null;
          onChanged();
        } else {
          applicationAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getApplicationAttemptIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationAttemptIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() {
        if (applicationAttemptIdBuilder_ != null) {
          return applicationAttemptIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationAttemptId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> 
          getApplicationAttemptIdFieldBuilder() {
        // Lazily switch from plain-field storage to the nested builder; after
        // this point applicationAttemptId_ is no longer authoritative.
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>(
                  getApplicationAttemptId(),
                  getParentForChildren(),
                  isClean());
          applicationAttemptId_ = null;
        }
        return applicationAttemptIdBuilder_;
      }

      // host = 2: stored as Object so it can hold either a decoded String or
      // the raw ByteString from the wire; decoded lazily on first access.
      private java.lang.Object host_ = "";
      /**
       * optional string host = 2;
       */
      public boolean hasHost() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional string host = 2;
       */
      public java.lang.String getHost() {
        java.lang.Object ref = host_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          // Cache the decoded form only when the bytes are valid UTF-8.
          if (bs.isValidUtf8()) {
            host_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string host = 2;
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getHostBytes() {
        java.lang.Object ref = host_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          host_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string host = 2;
       */
      public Builder setHost(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        host_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string host = 2;
       */
      public Builder clearHost() {
        bitField0_ = (bitField0_ & ~0x00000002);
        // Reset to the default instance's value (empty string).
        host_ = getDefaultInstance().getHost();
        onChanged();
        return this;
      }
      /**
       * optional string host = 2;
       */
      public Builder setHostBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        host_ = value;
        onChanged();
        return this;
      }

      // rpc_port = 3: plain int32; presence tracked via bit 0x04.
      private int rpcPort_ ;
      /**
       * optional int32 rpc_port = 3;
       */
      public boolean hasRpcPort() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * optional int32 rpc_port = 3;
       */
      public int getRpcPort() {
        return rpcPort_;
      }
      /**
       * optional int32 rpc_port = 3;
       */
      public Builder setRpcPort(int value) {
        bitField0_ |= 0x00000004;
        rpcPort_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 rpc_port = 3;
       */
      public Builder clearRpcPort() {
        bitField0_ = (bitField0_ & ~0x00000004);
        rpcPort_ = 0;
        onChanged();
        return this;
      }

      // master_container_id = 4: same plain-field / lazy-sub-builder scheme as
      // application_attempt_id above.
      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto masterContainerId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> masterContainerIdBuilder_;
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 4;
       */
      public boolean hasMasterContainerId() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 4;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getMasterContainerId() {
        if (masterContainerIdBuilder_ == null) {
          return masterContainerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : masterContainerId_;
        } else {
          return masterContainerIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 4;
       */
      public Builder setMasterContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (masterContainerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          masterContainerId_ = value;
          onChanged();
        } else {
          masterContainerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 4;
       */
      public Builder setMasterContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (masterContainerIdBuilder_ == null) {
          masterContainerId_ = builderForValue.build();
          onChanged();
        } else {
          masterContainerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 4;
       */
      public Builder mergeMasterContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (masterContainerIdBuilder_ == null) {
          // Merge recursively only when a non-default value is already set;
          // otherwise the incoming message simply replaces the field.
          if (((bitField0_ & 0x00000008) != 0) &&
              masterContainerId_ != null &&
              masterContainerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            masterContainerId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.newBuilder(masterContainerId_).mergeFrom(value).buildPartial();
          } else {
            masterContainerId_ = value;
          }
          onChanged();
        } else {
          masterContainerIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 4;
       */
      public Builder clearMasterContainerId() {
        if (masterContainerIdBuilder_ == null) {
          masterContainerId_ = null;
          onChanged();
        } else {
          masterContainerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 4;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getMasterContainerIdBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getMasterContainerIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 4;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getMasterContainerIdOrBuilder() {
        if (masterContainerIdBuilder_ != null) {
          return masterContainerIdBuilder_.getMessageOrBuilder();
        } else {
          return masterContainerId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : masterContainerId_;
        }
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto master_container_id = 4;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getMasterContainerIdFieldBuilder() {
        // Lazily switch from plain-field storage to the nested builder; after
        // this point masterContainerId_ is no longer authoritative.
        if (masterContainerIdBuilder_ == null) {
          masterContainerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getMasterContainerId(),
                  getParentForChildren(),
                  isClean());
          masterContainerId_ = null;
        }
        return masterContainerIdBuilder_;
      }
      // Unknown-field handling delegates entirely to the superclass.
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationAttemptStartDataProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationAttemptStartDataProto)
    // Singleton default (all-fields-unset) instance, created eagerly at class
    // initialization time.
    private static final org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto();
    }

    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    // Deprecated raw-typed parser kept public for backwards compatibility;
    // new code should use parser() / getParserForType() instead. The raw type
    // is emitted by the protoc generator and must not be "fixed" by hand.
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
      @java.lang.Override
      public ApplicationAttemptStartDataProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        return new ApplicationAttemptStartDataProto(input, extensionRegistry);
      }
    };

    // Preferred accessors for the shared PARSER instance.
    public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  /**
   * Read-only view of {@code hadoop.yarn.ApplicationAttemptFinishDataProto}:
   * implemented by both the immutable message and its Builder. Declares the
   * has/get accessor pairs for the five optional fields of the proto.
   */
  public interface ApplicationAttemptFinishDataProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationAttemptFinishDataProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    boolean hasApplicationAttemptId();
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId();
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder();

    /**
     * optional string tracking_url = 2;
     */
    boolean hasTrackingUrl();
    /**
     * optional string tracking_url = 2;
     */
    java.lang.String getTrackingUrl();
    /**
     * optional string tracking_url = 2;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTrackingUrlBytes();

    /**
     * optional string diagnostics_info = 3;
     */
    boolean hasDiagnosticsInfo();
    /**
     * optional string diagnostics_info = 3;
     */
    java.lang.String getDiagnosticsInfo();
    /**
     * optional string diagnostics_info = 3;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsInfoBytes();

    /**
     * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 4;
     */
    boolean hasFinalApplicationStatus();
    /**
     * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 4;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus();

    /**
     * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 5;
     */
    boolean hasYarnApplicationAttemptState();
    /**
     * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 5;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto getYarnApplicationAttemptState();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ApplicationAttemptFinishDataProto}
   */
  public  static final class ApplicationAttemptFinishDataProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationAttemptFinishDataProto)
      ApplicationAttemptFinishDataProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use ApplicationAttemptFinishDataProto.newBuilder() to construct.
    private ApplicationAttemptFinishDataProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) {
      super(builder);
    }
    // Default constructor initializes the string fields to empty and the two
    // enum-backed int fields to their first declared enum values.
    private ApplicationAttemptFinishDataProto() {
      trackingUrl_ = "";
      diagnosticsInfo_ = "";
      finalApplicationStatus_ = 0;
      yarnApplicationAttemptState_ = 1;
    }

    // Exposes fields that were on the wire but not in this message's schema.
    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0)
     * or an unparseable field, populating bitField0_ presence bits as fields
     * are seen. Unrecognized tags and out-of-range enum values are preserved
     * in the unknown-field set rather than dropped.
     */
    private ApplicationAttemptFinishDataProto(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              // application_attempt_id = 1 (length-delimited message). If the
              // field repeats on the wire, merge into the previous value.
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) != 0)) {
                subBuilder = applicationAttemptId_.toBuilder();
              }
              applicationAttemptId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(applicationAttemptId_);
                applicationAttemptId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // tracking_url = 2: keep the raw bytes; UTF-8 decode is lazy.
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000002;
              trackingUrl_ = bs;
              break;
            }
            case 26: {
              // diagnostics_info = 3: keep the raw bytes; UTF-8 decode is lazy.
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000004;
              diagnosticsInfo_ = bs;
              break;
            }
            case 32: {
              // final_application_status = 4: unknown enum numbers go to the
              // unknown-field set so they round-trip on reserialization.
              int rawValue = input.readEnum();
                @SuppressWarnings("deprecation")
              org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto value = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(4, rawValue);
              } else {
                bitField0_ |= 0x00000008;
                finalApplicationStatus_ = rawValue;
              }
              break;
            }
            case 40: {
              // yarn_application_attempt_state = 5: same unknown-enum handling.
              int rawValue = input.readEnum();
                @SuppressWarnings("deprecation")
              org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto value = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(5, rawValue);
              } else {
                bitField0_ |= 0x00000010;
                yarnApplicationAttemptState_ = rawValue;
              }
              break;
            }
            default: {
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        // Always seal the unknown fields, even on error, so the partially
        // parsed message attached to the exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection plumbing: descriptor and field-accessor table registered at
    // the enclosing file-descriptor level.
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationAttemptFinishDataProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationAttemptFinishDataProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto.Builder.class);
    }

    // Presence bits for the message's five optional fields (bit 0..4).
    private int bitField0_;
    public static final int APPLICATION_ATTEMPT_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_;
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    public boolean hasApplicationAttemptId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() {
      // Never returns null: substitutes the default instance when unset.
      return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
    }
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() {
      return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
    }

    public static final int TRACKING_URL_FIELD_NUMBER = 2;
    // tracking_url = 2: Object holds either a decoded String or the raw
    // ByteString from the wire; volatile because decode-and-cache may race
    // benignly across threads.
    private volatile java.lang.Object trackingUrl_;
    /**
     * optional string tracking_url = 2;
     */
    public boolean hasTrackingUrl() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * optional string tracking_url = 2;
     */
    public java.lang.String getTrackingUrl() {
      java.lang.Object ref = trackingUrl_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded form only when the bytes are valid UTF-8.
        if (bs.isValidUtf8()) {
          trackingUrl_ = s;
        }
        return s;
      }
    }
    /**
     * optional string tracking_url = 2;
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getTrackingUrlBytes() {
      java.lang.Object ref = trackingUrl_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        trackingUrl_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int DIAGNOSTICS_INFO_FIELD_NUMBER = 3;
    // diagnostics_info = 3: same lazy String/ByteString scheme as tracking_url.
    private volatile java.lang.Object diagnosticsInfo_;
    /**
     * optional string diagnostics_info = 3;
     */
    public boolean hasDiagnosticsInfo() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * optional string diagnostics_info = 3;
     */
    public java.lang.String getDiagnosticsInfo() {
      java.lang.Object ref = diagnosticsInfo_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          diagnosticsInfo_ = s;
        }
        return s;
      }
    }
    /**
     * optional string diagnostics_info = 3;
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsInfoBytes() {
      java.lang.Object ref = diagnosticsInfo_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        diagnosticsInfo_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int FINAL_APPLICATION_STATUS_FIELD_NUMBER = 4;
    // final_application_status = 4: stored as the raw enum number.
    private int finalApplicationStatus_;
    /**
     * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 4;
     */
    public boolean hasFinalApplicationStatus() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 4;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus() {
      @SuppressWarnings("deprecation")
      org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.valueOf(finalApplicationStatus_);
      // An unrecognized stored number maps to APP_UNDEFINED rather than null.
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.APP_UNDEFINED : result;
    }

    public static final int YARN_APPLICATION_ATTEMPT_STATE_FIELD_NUMBER = 5;
    // yarn_application_attempt_state = 5: stored as the raw enum number.
    private int yarnApplicationAttemptState_;
    /**
     * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 5;
     */
    public boolean hasYarnApplicationAttemptState() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 5;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto getYarnApplicationAttemptState() {
      @SuppressWarnings("deprecation")
      org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.valueOf(yarnApplicationAttemptState_);
      // An unrecognized stored number maps to APP_ATTEMPT_NEW rather than null.
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.APP_ATTEMPT_NEW : result;
    }

    // Memoized result of isInitialized(): -1 = not yet computed, 1 = true,
    // 0 = false.
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      // Every field of this message is optional, so any instance is
      // initialized; cache and return true.
      memoizedIsInitialized = 1;
      return true;
    }

    /**
     * Serializes the set fields of this message to {@code output} in field
     * number order. Fields whose presence bit in {@code bitField0_} is clear
     * are skipped; unknown fields preserved from parsing are appended last.
     */
    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationAttemptId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, trackingUrl_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, diagnosticsInfo_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeEnum(4, finalApplicationStatus_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeEnum(5, yarnApplicationAttemptState_);
      }
      unknownFields.writeTo(output);
    }

    /**
     * Returns the exact number of bytes {@link #writeTo} will produce,
     * summing only fields whose presence bit is set plus any unknown fields.
     * The result is memoized in {@code memoizedSize} (-1 means not computed).
     */
    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationAttemptId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, trackingUrl_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, diagnosticsInfo_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(4, finalApplicationStatus_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(5, yarnApplicationAttemptState_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    /**
     * Field-wise equality: two messages are equal when every field has the
     * same presence and, where present, the same value, and their unknown
     * field sets match. Enum fields compare raw wire values directly.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto other = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto) obj;

      if (hasApplicationAttemptId() != other.hasApplicationAttemptId()) return false;
      if (hasApplicationAttemptId()) {
        if (!getApplicationAttemptId()
            .equals(other.getApplicationAttemptId())) return false;
      }
      if (hasTrackingUrl() != other.hasTrackingUrl()) return false;
      if (hasTrackingUrl()) {
        if (!getTrackingUrl()
            .equals(other.getTrackingUrl())) return false;
      }
      if (hasDiagnosticsInfo() != other.hasDiagnosticsInfo()) return false;
      if (hasDiagnosticsInfo()) {
        if (!getDiagnosticsInfo()
            .equals(other.getDiagnosticsInfo())) return false;
      }
      if (hasFinalApplicationStatus() != other.hasFinalApplicationStatus()) return false;
      if (hasFinalApplicationStatus()) {
        if (finalApplicationStatus_ != other.finalApplicationStatus_) return false;
      }
      if (hasYarnApplicationAttemptState() != other.hasYarnApplicationAttemptState()) return false;
      if (hasYarnApplicationAttemptState()) {
        if (yarnApplicationAttemptState_ != other.yarnApplicationAttemptState_) return false;
      }
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }

    /**
     * Hash consistent with {@link #equals}: mixes the descriptor hash with
     * each present field's tag number and value hash, then the unknown
     * fields. The result is memoized (0 means not yet computed).
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationAttemptId()) {
        hash = (37 * hash) + APPLICATION_ATTEMPT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationAttemptId().hashCode();
      }
      if (hasTrackingUrl()) {
        hash = (37 * hash) + TRACKING_URL_FIELD_NUMBER;
        hash = (53 * hash) + getTrackingUrl().hashCode();
      }
      if (hasDiagnosticsInfo()) {
        hash = (37 * hash) + DIAGNOSTICS_INFO_FIELD_NUMBER;
        hash = (53 * hash) + getDiagnosticsInfo().hashCode();
      }
      if (hasFinalApplicationStatus()) {
        hash = (37 * hash) + FINAL_APPLICATION_STATUS_FIELD_NUMBER;
        hash = (53 * hash) + finalApplicationStatus_;
      }
      if (hasYarnApplicationAttemptState()) {
        hash = (37 * hash) + YARN_APPLICATION_ATTEMPT_STATE_FIELD_NUMBER;
        hash = (53 * hash) + yarnApplicationAttemptState_;
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Standard generated parse entry points. The byte-based overloads throw
    // InvalidProtocolBufferException on malformed input; the stream-based
    // ones surface I/O failures as IOException via parseWithIOException.
    // parseDelimitedFrom reads a varint length prefix before the message.
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    // Builder factories. newBuilder() starts from the default instance;
    // newBuilder(prototype) pre-populates from an existing message;
    // toBuilder() avoids a needless merge when called on DEFAULT_INSTANCE.
    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ApplicationAttemptFinishDataProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationAttemptFinishDataProto)
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationAttemptFinishDataProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationAttemptFinishDataProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationAttemptIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptId_ = null;
        } else {
          applicationAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        trackingUrl_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        diagnosticsInfo_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        finalApplicationStatus_ = 0;
        bitField0_ = (bitField0_ & ~0x00000008);
        yarnApplicationAttemptState_ = 1;
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ApplicationAttemptFinishDataProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto build() {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto buildPartial() {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto result = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          if (applicationAttemptIdBuilder_ == null) {
            result.applicationAttemptId_ = applicationAttemptId_;
          } else {
            result.applicationAttemptId_ = applicationAttemptIdBuilder_.build();
          }
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          to_bitField0_ |= 0x00000002;
        }
        result.trackingUrl_ = trackingUrl_;
        if (((from_bitField0_ & 0x00000004) != 0)) {
          to_bitField0_ |= 0x00000004;
        }
        result.diagnosticsInfo_ = diagnosticsInfo_;
        if (((from_bitField0_ & 0x00000008) != 0)) {
          to_bitField0_ |= 0x00000008;
        }
        result.finalApplicationStatus_ = finalApplicationStatus_;
        if (((from_bitField0_ & 0x00000010) != 0)) {
          to_bitField0_ |= 0x00000010;
        }
        result.yarnApplicationAttemptState_ = yarnApplicationAttemptState_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto other) {
        if (other == org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto.getDefaultInstance()) return this;
        if (other.hasApplicationAttemptId()) {
          mergeApplicationAttemptId(other.getApplicationAttemptId());
        }
        if (other.hasTrackingUrl()) {
          bitField0_ |= 0x00000002;
          trackingUrl_ = other.trackingUrl_;
          onChanged();
        }
        if (other.hasDiagnosticsInfo()) {
          bitField0_ |= 0x00000004;
          diagnosticsInfo_ = other.diagnosticsInfo_;
          onChanged();
        }
        if (other.hasFinalApplicationStatus()) {
          setFinalApplicationStatus(other.getFinalApplicationStatus());
        }
        if (other.hasYarnApplicationAttemptState()) {
          setYarnApplicationAttemptState(other.getYarnApplicationAttemptState());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> applicationAttemptIdBuilder_;
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public boolean hasApplicationAttemptId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() {
        if (applicationAttemptIdBuilder_ == null) {
          return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
        } else {
          return applicationAttemptIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public Builder setApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (applicationAttemptIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationAttemptId_ = value;
          onChanged();
        } else {
          applicationAttemptIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public Builder setApplicationAttemptId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptId_ = builderForValue.build();
          onChanged();
        } else {
          applicationAttemptIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public Builder mergeApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (applicationAttemptIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
              applicationAttemptId_ != null &&
              applicationAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) {
            applicationAttemptId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.newBuilder(applicationAttemptId_).mergeFrom(value).buildPartial();
          } else {
            applicationAttemptId_ = value;
          }
          onChanged();
        } else {
          applicationAttemptIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public Builder clearApplicationAttemptId() {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptId_ = null;
          onChanged();
        } else {
          applicationAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getApplicationAttemptIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationAttemptIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() {
        if (applicationAttemptIdBuilder_ != null) {
          return applicationAttemptIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationAttemptId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> 
          getApplicationAttemptIdFieldBuilder() {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>(
                  getApplicationAttemptId(),
                  getParentForChildren(),
                  isClean());
          applicationAttemptId_ = null;
        }
        return applicationAttemptIdBuilder_;
      }

      private java.lang.Object trackingUrl_ = "";
      /**
       * optional string tracking_url = 2;
       */
      public boolean hasTrackingUrl() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional string tracking_url = 2;
       */
      public java.lang.String getTrackingUrl() {
        java.lang.Object ref = trackingUrl_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            trackingUrl_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string tracking_url = 2;
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getTrackingUrlBytes() {
        java.lang.Object ref = trackingUrl_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          trackingUrl_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string tracking_url = 2;
       */
      public Builder setTrackingUrl(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        trackingUrl_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string tracking_url = 2;
       */
      public Builder clearTrackingUrl() {
        bitField0_ = (bitField0_ & ~0x00000002);
        trackingUrl_ = getDefaultInstance().getTrackingUrl();
        onChanged();
        return this;
      }
      /**
       * optional string tracking_url = 2;
       */
      public Builder setTrackingUrlBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        trackingUrl_ = value;
        onChanged();
        return this;
      }

      private java.lang.Object diagnosticsInfo_ = "";
      /**
       * optional string diagnostics_info = 3;
       */
      public boolean hasDiagnosticsInfo() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * optional string diagnostics_info = 3;
       */
      public java.lang.String getDiagnosticsInfo() {
        java.lang.Object ref = diagnosticsInfo_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            diagnosticsInfo_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string diagnostics_info = 3;
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDiagnosticsInfoBytes() {
        java.lang.Object ref = diagnosticsInfo_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          diagnosticsInfo_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string diagnostics_info = 3;
       */
      public Builder setDiagnosticsInfo(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        diagnosticsInfo_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string diagnostics_info = 3;
       */
      public Builder clearDiagnosticsInfo() {
        bitField0_ = (bitField0_ & ~0x00000004);
        diagnosticsInfo_ = getDefaultInstance().getDiagnosticsInfo();
        onChanged();
        return this;
      }
      /**
       * optional string diagnostics_info = 3;
       */
      public Builder setDiagnosticsInfoBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        diagnosticsInfo_ = value;
        onChanged();
        return this;
      }

      private int finalApplicationStatus_ = 0;
      /**
       * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 4;
       */
      public boolean hasFinalApplicationStatus() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 4;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus() {
        @SuppressWarnings("deprecation")
        org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.valueOf(finalApplicationStatus_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.APP_UNDEFINED : result;
      }
      /**
       * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 4;
       */
      public Builder setFinalApplicationStatus(org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        finalApplicationStatus_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 4;
       */
      public Builder clearFinalApplicationStatus() {
        bitField0_ = (bitField0_ & ~0x00000008);
        finalApplicationStatus_ = 0;
        onChanged();
        return this;
      }

      private int yarnApplicationAttemptState_ = 1;
      /**
       * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 5;
       */
      public boolean hasYarnApplicationAttemptState() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 5;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto getYarnApplicationAttemptState() {
        // forNumber(int) is the supported replacement for the deprecated valueOf(int);
        // an unrecognized stored number falls back to the default APP_ATTEMPT_NEW.
        org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.forNumber(yarnApplicationAttemptState_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.APP_ATTEMPT_NEW : result;
      }
      /**
       * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 5;
       */
      public Builder setYarnApplicationAttemptState(org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto value) {
        // Reject null up front (same NullPointerException as the original explicit
        // throw), then record the has-bit and store the enum's wire number.
        java.util.Objects.requireNonNull(value);
        bitField0_ |= 0x00000010;
        yarnApplicationAttemptState_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 5;
       */
      public Builder clearYarnApplicationAttemptState() {
        // Clear the has-bit and restore the field's default wire value (1 == APP_ATTEMPT_NEW).
        bitField0_ = (bitField0_ & ~0x00000010);
        yarnApplicationAttemptState_ = 1;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        // Delegate straight to GeneratedMessageV3.Builder; declared final so the
        // preservation of unknown fields cannot be overridden.
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationAttemptFinishDataProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationAttemptFinishDataProto)
    // Singleton default instance; accessor default values resolve against it and
    // the parser uses it as the builder prototype.
    private static final org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto();
    }

    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    // NOTE(review): the type arguments on Parser/AbstractParser were lost in this
    // copy of the generated source (raw types); restored here to match the protoc
    // output. Deprecated in favor of the static parser() accessor.
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationAttemptFinishDataProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationAttemptFinishDataProto>() {
      @java.lang.Override
      public ApplicationAttemptFinishDataProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        return new ApplicationAttemptFinishDataProto(input, extensionRegistry);
      }
    };

    // NOTE(review): restored the lost Parser<T> type argument; the raw return type
    // would force unchecked casts on every caller.
    public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationAttemptFinishDataProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationAttemptFinishDataProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto getDefaultInstanceForType() {
      // Same singleton as getDefaultInstance(); required by the MessageLite contract.
      return DEFAULT_INSTANCE;
    }

  }

  // Read-only accessor contract implemented by both ContainerHistoryDataProto and
  // its Builder. All nine fields are optional; hasXxx() reflects explicit presence
  // on the wire, getXxx() returns the proto default when the field is absent.
  public interface ContainerHistoryDataProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerHistoryDataProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     */
    boolean hasContainerId();
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();

    /**
     * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
     */
    boolean hasAllocatedResource();
    /**
     * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getAllocatedResource();
    /**
     * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getAllocatedResourceOrBuilder();

    /**
     * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
     */
    boolean hasAssignedNodeId();
    /**
     * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getAssignedNodeId();
    /**
     * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getAssignedNodeIdOrBuilder();

    /**
     * optional .hadoop.yarn.PriorityProto priority = 4;
     */
    boolean hasPriority();
    /**
     * optional .hadoop.yarn.PriorityProto priority = 4;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority();
    /**
     * optional .hadoop.yarn.PriorityProto priority = 4;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder();

    /**
     * optional int64 start_time = 5;
     */
    boolean hasStartTime();
    /**
     * optional int64 start_time = 5;
     */
    long getStartTime();

    /**
     * optional int64 finish_time = 6;
     */
    boolean hasFinishTime();
    /**
     * optional int64 finish_time = 6;
     */
    long getFinishTime();

    /**
     * optional string diagnostics_info = 7;
     */
    boolean hasDiagnosticsInfo();
    /**
     * optional string diagnostics_info = 7;
     */
    java.lang.String getDiagnosticsInfo();
    /**
     * optional string diagnostics_info = 7;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsInfoBytes();

    /**
     * optional int32 container_exit_status = 8;
     */
    boolean hasContainerExitStatus();
    /**
     * optional int32 container_exit_status = 8;
     */
    int getContainerExitStatus();

    /**
     * optional .hadoop.yarn.ContainerStateProto container_state = 9;
     */
    boolean hasContainerState();
    /**
     * optional .hadoop.yarn.ContainerStateProto container_state = 9;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getContainerState();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ContainerHistoryDataProto}
   */
  public  static final class ContainerHistoryDataProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerHistoryDataProto)
      ContainerHistoryDataProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use ContainerHistoryDataProto.newBuilder() to construct.
    // NOTE(review): the builder parameter's lost wildcard type argument
    // (Builder<?>) is restored to match the protoc output; the raw type
    // compiles but produces unchecked warnings.
    private ContainerHistoryDataProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ContainerHistoryDataProto() {
      // Only the fields whose Java default differs from the proto default need
      // explicit initialization: empty string and containerState_ = 1 (C_NEW).
      diagnosticsInfo_ = "";
      containerState_ = 1;
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until end of stream
    // (tag 0). Message-typed fields seen more than once are merged via a builder;
    // unknown tags and unrecognized enum numbers are preserved in unknownFields.
    private ContainerHistoryDataProto(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // tag = (field_number << 3) | wire_type; e.g. 10 = field 1, length-delimited.
            case 10: {
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) != 0)) {
                subBuilder = containerId_.toBuilder();
              }
              containerId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(containerId_);
                containerId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) != 0)) {
                subBuilder = allocatedResource_.toBuilder();
              }
              allocatedResource_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(allocatedResource_);
                allocatedResource_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
            case 26: {
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000004) != 0)) {
                subBuilder = assignedNodeId_.toBuilder();
              }
              assignedNodeId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(assignedNodeId_);
                assignedNodeId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000004;
              break;
            }
            case 34: {
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000008) != 0)) {
                subBuilder = priority_.toBuilder();
              }
              priority_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(priority_);
                priority_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000008;
              break;
            }
            case 40: {
              bitField0_ |= 0x00000010;
              startTime_ = input.readInt64();
              break;
            }
            case 48: {
              bitField0_ |= 0x00000020;
              finishTime_ = input.readInt64();
              break;
            }
            case 58: {
              // String field stored lazily as ByteString; decoded on first access.
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000040;
              diagnosticsInfo_ = bs;
              break;
            }
            case 64: {
              bitField0_ |= 0x00000080;
              containerExitStatus_ = input.readInt32();
              break;
            }
            case 72: {
              // Enum field: an unrecognized number is kept as an unknown varint
              // so re-serialization round-trips it (proto2 semantics).
              int rawValue = input.readEnum();
                @SuppressWarnings("deprecation")
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto value = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(9, rawValue);
              } else {
                bitField0_ |= 0x00000100;
                containerState_ = rawValue;
              }
              break;
            }
            default: {
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        // Always freeze what was parsed so far, even on error, so the partial
        // message attached to the exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      // Descriptor objects are initialized once in the outer class's static block.
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ContainerHistoryDataProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ContainerHistoryDataProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto.Builder.class);
    }

    // Presence bitmask for the nine optional fields; bit n-1 tracks field n.
    private int bitField0_;
    public static final int CONTAINER_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     */
    public boolean hasContainerId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }

    public static final int ALLOCATED_RESOURCE_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto allocatedResource_;
    /**
     * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
     */
    public boolean hasAllocatedResource() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getAllocatedResource() {
      return allocatedResource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : allocatedResource_;
    }
    /**
     * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getAllocatedResourceOrBuilder() {
      return allocatedResource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : allocatedResource_;
    }

    public static final int ASSIGNED_NODE_ID_FIELD_NUMBER = 3;
    private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto assignedNodeId_;
    /**
     * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
     */
    public boolean hasAssignedNodeId() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getAssignedNodeId() {
      return assignedNodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : assignedNodeId_;
    }
    /**
     * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getAssignedNodeIdOrBuilder() {
      return assignedNodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : assignedNodeId_;
    }

    public static final int PRIORITY_FIELD_NUMBER = 4;
    private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
    /**
     * optional .hadoop.yarn.PriorityProto priority = 4;
     */
    public boolean hasPriority() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * optional .hadoop.yarn.PriorityProto priority = 4;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }
    /**
     * optional .hadoop.yarn.PriorityProto priority = 4;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }

    public static final int START_TIME_FIELD_NUMBER = 5;
    private long startTime_;
    /**
     * optional int64 start_time = 5;
     */
    public boolean hasStartTime() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * optional int64 start_time = 5;
     */
    public long getStartTime() {
      return startTime_;
    }

    public static final int FINISH_TIME_FIELD_NUMBER = 6;
    private long finishTime_;
    /**
     * optional int64 finish_time = 6;
     */
    public boolean hasFinishTime() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * optional int64 finish_time = 6;
     */
    public long getFinishTime() {
      return finishTime_;
    }

    public static final int DIAGNOSTICS_INFO_FIELD_NUMBER = 7;
    // Holds either a String or a lazily-decoded ByteString; volatile so the
    // one-time String caching below is safe under concurrent readers.
    private volatile java.lang.Object diagnosticsInfo_;
    /**
     * optional string diagnostics_info = 7;
     */
    public boolean hasDiagnosticsInfo() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * optional string diagnostics_info = 7;
     */
    public java.lang.String getDiagnosticsInfo() {
      java.lang.Object ref = diagnosticsInfo_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded form only if it round-trips (valid UTF-8).
        if (bs.isValidUtf8()) {
          diagnosticsInfo_ = s;
        }
        return s;
      }
    }
    /**
     * optional string diagnostics_info = 7;
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsInfoBytes() {
      java.lang.Object ref = diagnosticsInfo_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        diagnosticsInfo_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int CONTAINER_EXIT_STATUS_FIELD_NUMBER = 8;
    private int containerExitStatus_;
    /**
     * optional int32 container_exit_status = 8;
     */
    public boolean hasContainerExitStatus() {
      return ((bitField0_ & 0x00000080) != 0);
    }
    /**
     * optional int32 container_exit_status = 8;
     */
    public int getContainerExitStatus() {
      return containerExitStatus_;
    }

    public static final int CONTAINER_STATE_FIELD_NUMBER = 9;
    // Raw wire number of the enum; unrecognized numbers never reach this field
    // (the parsing constructor routes them to unknownFields).
    private int containerState_;
    /**
     * optional .hadoop.yarn.ContainerStateProto container_state = 9;
     */
    public boolean hasContainerState() {
      return ((bitField0_ & 0x00000100) != 0);
    }
    /**
     * optional .hadoop.yarn.ContainerStateProto container_state = 9;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getContainerState() {
      // forNumber(int) is the supported replacement for the deprecated valueOf(int);
      // a null (unknown number) falls back to the field default C_NEW.
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.forNumber(containerState_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.C_NEW : result;
    }

    // Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      // allocated_resource is the only sub-message with required fields of its own.
      if (hasAllocatedResource()) {
        if (!getAllocatedResource().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Emit only fields whose presence bit is set, in ascending field-number order,
      // then replay preserved unknown fields.
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getContainerId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getAllocatedResource());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeMessage(3, getAssignedNodeId());
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeMessage(4, getPriority());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeInt64(5, startTime_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeInt64(6, finishTime_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 7, diagnosticsInfo_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        output.writeInt32(8, containerExitStatus_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        output.writeEnum(9, containerState_);
      }
      unknownFields.writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      // Memoized; -1 means "not computed yet". Must mirror writeTo() exactly.
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getContainerId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getAllocatedResource());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(3, getAssignedNodeId());
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(4, getPriority());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(5, startTime_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(6, finishTime_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(7, diagnosticsInfo_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(8, containerExitStatus_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(9, containerState_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      // Field-by-field comparison: presence bits must match, and present values
      // must be equal; unknown fields participate too.
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto other = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto) obj;

      if (hasContainerId() != other.hasContainerId()) return false;
      if (hasContainerId()) {
        if (!getContainerId()
            .equals(other.getContainerId())) return false;
      }
      if (hasAllocatedResource() != other.hasAllocatedResource()) return false;
      if (hasAllocatedResource()) {
        if (!getAllocatedResource()
            .equals(other.getAllocatedResource())) return false;
      }
      if (hasAssignedNodeId() != other.hasAssignedNodeId()) return false;
      if (hasAssignedNodeId()) {
        if (!getAssignedNodeId()
            .equals(other.getAssignedNodeId())) return false;
      }
      if (hasPriority() != other.hasPriority()) return false;
      if (hasPriority()) {
        if (!getPriority()
            .equals(other.getPriority())) return false;
      }
      if (hasStartTime() != other.hasStartTime()) return false;
      if (hasStartTime()) {
        if (getStartTime()
            != other.getStartTime()) return false;
      }
      if (hasFinishTime() != other.hasFinishTime()) return false;
      if (hasFinishTime()) {
        if (getFinishTime()
            != other.getFinishTime()) return false;
      }
      if (hasDiagnosticsInfo() != other.hasDiagnosticsInfo()) return false;
      if (hasDiagnosticsInfo()) {
        if (!getDiagnosticsInfo()
            .equals(other.getDiagnosticsInfo())) return false;
      }
      if (hasContainerExitStatus() != other.hasContainerExitStatus()) return false;
      if (hasContainerExitStatus()) {
        if (getContainerExitStatus()
            != other.getContainerExitStatus()) return false;
      }
      if (hasContainerState() != other.hasContainerState()) return false;
      if (hasContainerState()) {
        // Enums compare by raw wire number.
        if (containerState_ != other.containerState_) return false;
      }
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      // Memoized (0 means "not computed"); only present fields contribute, so the
      // result is consistent with equals() above.
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasContainerId()) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerId().hashCode();
      }
      if (hasAllocatedResource()) {
        hash = (37 * hash) + ALLOCATED_RESOURCE_FIELD_NUMBER;
        hash = (53 * hash) + getAllocatedResource().hashCode();
      }
      if (hasAssignedNodeId()) {
        hash = (37 * hash) + ASSIGNED_NODE_ID_FIELD_NUMBER;
        hash = (53 * hash) + getAssignedNodeId().hashCode();
      }
      if (hasPriority()) {
        hash = (37 * hash) + PRIORITY_FIELD_NUMBER;
        hash = (53 * hash) + getPriority().hashCode();
      }
      if (hasStartTime()) {
        hash = (37 * hash) + START_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getStartTime());
      }
      if (hasFinishTime()) {
        hash = (37 * hash) + FINISH_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getFinishTime());
      }
      if (hasDiagnosticsInfo()) {
        hash = (37 * hash) + DIAGNOSTICS_INFO_FIELD_NUMBER;
        hash = (53 * hash) + getDiagnosticsInfo().hashCode();
      }
      if (hasContainerExitStatus()) {
        hash = (37 * hash) + CONTAINER_EXIT_STATUS_FIELD_NUMBER;
        hash = (53 * hash) + getContainerExitStatus();
      }
      if (hasContainerState()) {
        hash = (37 * hash) + CONTAINER_STATE_FIELD_NUMBER;
        hash = (53 * hash) + containerState_;
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Standard generated parse entry points: one overload per input source
    // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
    // and without an ExtensionRegistryLite; all delegate to PARSER.
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    // Fresh builder seeded from the singleton default instance.
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    // Builder pre-populated with all fields of {@code prototype}.
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      // The default instance maps to an empty builder; any other instance is
      // copied field-by-field into the new builder via mergeFrom.
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    // Internal factory used by parent builders so nested-field change
    // notifications propagate upward.
    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ContainerHistoryDataProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerHistoryDataProto)
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProtoOrBuilder {
      // Static descriptor for hadoop.yarn.ContainerHistoryDataProto, shared
      // with the message class; backs all reflective access.
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ContainerHistoryDataProto_descriptor;
      }

      // Binds the descriptor's fields to the generated message/builder classes
      // so the protobuf runtime can get/set fields reflectively.
      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ContainerHistoryDataProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Constructor used when this builder is nested inside a parent builder;
      // the parent is notified of changes via onChanged().
      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // alwaysUseFieldBuilders is a runtime hook (normally false) that forces
        // eager creation of the sub-message field builders instead of the usual
        // lazy creation on first access.
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getContainerIdFieldBuilder();
          getAllocatedResourceFieldBuilder();
          getAssignedNodeIdFieldBuilder();
          getPriorityFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        // Resets every field to its proto default and clears all presence bits
        // in bitField0_. For sub-message fields an existing nested builder is
        // cleared in place (and reused) rather than discarded.
        if (containerIdBuilder_ == null) {
          containerId_ = null;
        } else {
          containerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (allocatedResourceBuilder_ == null) {
          allocatedResource_ = null;
        } else {
          allocatedResourceBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        if (assignedNodeIdBuilder_ == null) {
          assignedNodeId_ = null;
        } else {
          assignedNodeIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        if (priorityBuilder_ == null) {
          priority_ = null;
        } else {
          priorityBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        startTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000010);
        finishTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000020);
        diagnosticsInfo_ = "";
        bitField0_ = (bitField0_ & ~0x00000040);
        containerExitStatus_ = 0;
        bitField0_ = (bitField0_ & ~0x00000080);
        // container_state default is the enum value numbered 1.
        containerState_ = 1;
        bitField0_ = (bitField0_ & ~0x00000100);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ContainerHistoryDataProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto build() {
        // Like buildPartial(), but rejects messages that fail isInitialized()
        // (see that method for which sub-messages are checked).
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto buildPartial() {
        // Copies the builder's state into a new immutable message without
        // checking required-field initialization. Presence bits are carried
        // from the builder's bitField0_ into the message's bitField0_.
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto result = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          // Sub-message fields: take the plain field when no nested builder
          // exists, otherwise build the nested builder's current state.
          if (containerIdBuilder_ == null) {
            result.containerId_ = containerId_;
          } else {
            result.containerId_ = containerIdBuilder_.build();
          }
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          if (allocatedResourceBuilder_ == null) {
            result.allocatedResource_ = allocatedResource_;
          } else {
            result.allocatedResource_ = allocatedResourceBuilder_.build();
          }
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          if (assignedNodeIdBuilder_ == null) {
            result.assignedNodeId_ = assignedNodeId_;
          } else {
            result.assignedNodeId_ = assignedNodeIdBuilder_.build();
          }
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          if (priorityBuilder_ == null) {
            result.priority_ = priority_;
          } else {
            result.priority_ = priorityBuilder_.build();
          }
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.startTime_ = startTime_;
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.finishTime_ = finishTime_;
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          to_bitField0_ |= 0x00000040;
        }
        // diagnosticsInfo_ and containerState_ are copied unconditionally
        // (standard generated pattern); when unset they still hold the default
        // value and the presence bit above stays clear.
        result.diagnosticsInfo_ = diagnosticsInfo_;
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.containerExitStatus_ = containerExitStatus_;
          to_bitField0_ |= 0x00000080;
        }
        if (((from_bitField0_ & 0x00000100) != 0)) {
          to_bitField0_ |= 0x00000100;
        }
        result.containerState_ = containerState_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // The overrides below exist only to narrow the return type to this
      // Builder; each delegates directly to GeneratedMessageV3.Builder.
      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        // Fast path for the concrete type; any other message type is merged
        // reflectively by the base class.
        if (other instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges every field of {@code other} that is present into this builder:
      // sub-messages are merged recursively, scalars are overwritten, and the
      // string field shares other's backing reference (immutable, so safe).
      public Builder mergeFrom(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto other) {
        if (other == org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto.getDefaultInstance()) return this;
        if (other.hasContainerId()) {
          mergeContainerId(other.getContainerId());
        }
        if (other.hasAllocatedResource()) {
          mergeAllocatedResource(other.getAllocatedResource());
        }
        if (other.hasAssignedNodeId()) {
          mergeAssignedNodeId(other.getAssignedNodeId());
        }
        if (other.hasPriority()) {
          mergePriority(other.getPriority());
        }
        if (other.hasStartTime()) {
          setStartTime(other.getStartTime());
        }
        if (other.hasFinishTime()) {
          setFinishTime(other.getFinishTime());
        }
        if (other.hasDiagnosticsInfo()) {
          bitField0_ |= 0x00000040;
          diagnosticsInfo_ = other.diagnosticsInfo_;
          onChanged();
        }
        if (other.hasContainerExitStatus()) {
          setContainerExitStatus(other.getContainerExitStatus());
        }
        if (other.hasContainerState()) {
          setContainerState(other.getContainerState());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        // Only allocated_resource is checked: the generator emits a recursive
        // check for sub-messages whose type (transitively) declares required
        // fields — presumably true of ResourceProto here (confirm against
        // yarn_protos.proto). All other fields need no initialization check.
        if (hasAllocatedResource()) {
          if (!getAllocatedResource().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        // Parse a complete message, then merge it in. On a parse failure the
        // partially-parsed message is still merged (finally block) before the
        // exception is rethrown, so successfully-read fields are not lost.
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bitmask: one bit per field (0x1 container_id, 0x2
      // allocated_resource, 0x4 assigned_node_id, 0x8 priority, 0x10
      // start_time, 0x20 finish_time, 0x40 diagnostics_info, 0x80
      // container_exit_status, 0x100 container_state).
      private int bitField0_;

      // container_id is stored either as a plain message (containerId_) or,
      // once nested building starts, in the lazily-created field builder; at
      // most one of the two is authoritative at a time.
      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public boolean hasContainerId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
        // Never returns null: falls back to the default instance when unset.
        if (containerIdBuilder_ == null) {
          return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        } else {
          return containerIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerId_ = value;
          onChanged();
        } else {
          containerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public Builder setContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          containerId_ = builderForValue.build();
          onChanged();
        } else {
          containerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        // Field-merge semantics: if a non-default value is already present the
        // two messages are merged; otherwise the new value simply replaces it.
        if (containerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
              containerId_ != null &&
              containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            containerId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.newBuilder(containerId_).mergeFrom(value).buildPartial();
          } else {
            containerId_ = value;
          }
          onChanged();
        } else {
          containerIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public Builder clearContainerId() {
        if (containerIdBuilder_ == null) {
          containerId_ = null;
          onChanged();
        } else {
          containerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
        // Accessing the nested builder marks the field present.
        bitField0_ |= 0x00000001;
        onChanged();
        return getContainerIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilder();
        } else {
          return containerId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        }
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getContainerIdFieldBuilder() {
        // Lazily creates the field builder, seeding it with the current value
        // and handing ownership over (containerId_ is nulled afterwards).
        if (containerIdBuilder_ == null) {
          containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getContainerId(),
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }

      // allocated_resource: same plain-field-or-lazy-builder storage pattern
      // as container_id; presence bit 0x00000002.
      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto allocatedResource_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> allocatedResourceBuilder_;
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      public boolean hasAllocatedResource() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getAllocatedResource() {
        // Never returns null: falls back to the default instance when unset.
        if (allocatedResourceBuilder_ == null) {
          return allocatedResource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : allocatedResource_;
        } else {
          return allocatedResourceBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      public Builder setAllocatedResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (allocatedResourceBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          allocatedResource_ = value;
          onChanged();
        } else {
          allocatedResourceBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      public Builder setAllocatedResource(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (allocatedResourceBuilder_ == null) {
          allocatedResource_ = builderForValue.build();
          onChanged();
        } else {
          allocatedResourceBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      public Builder mergeAllocatedResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        // Merge with any existing non-default value; otherwise replace.
        if (allocatedResourceBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
              allocatedResource_ != null &&
              allocatedResource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            allocatedResource_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.newBuilder(allocatedResource_).mergeFrom(value).buildPartial();
          } else {
            allocatedResource_ = value;
          }
          onChanged();
        } else {
          allocatedResourceBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      public Builder clearAllocatedResource() {
        if (allocatedResourceBuilder_ == null) {
          allocatedResource_ = null;
          onChanged();
        } else {
          allocatedResourceBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getAllocatedResourceBuilder() {
        // Accessing the nested builder marks the field present.
        bitField0_ |= 0x00000002;
        onChanged();
        return getAllocatedResourceFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getAllocatedResourceOrBuilder() {
        if (allocatedResourceBuilder_ != null) {
          return allocatedResourceBuilder_.getMessageOrBuilder();
        } else {
          return allocatedResource_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : allocatedResource_;
        }
      }
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getAllocatedResourceFieldBuilder() {
        // Lazily creates the field builder, seeded from the current value;
        // ownership moves to the builder (plain field nulled).
        if (allocatedResourceBuilder_ == null) {
          allocatedResourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getAllocatedResource(),
                  getParentForChildren(),
                  isClean());
          allocatedResource_ = null;
        }
        return allocatedResourceBuilder_;
      }

      // assigned_node_id: same plain-field-or-lazy-builder storage pattern;
      // presence bit 0x00000004.
      private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto assignedNodeId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> assignedNodeIdBuilder_;
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      public boolean hasAssignedNodeId() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getAssignedNodeId() {
        // Never returns null: falls back to the default instance when unset.
        if (assignedNodeIdBuilder_ == null) {
          return assignedNodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : assignedNodeId_;
        } else {
          return assignedNodeIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      public Builder setAssignedNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (assignedNodeIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          assignedNodeId_ = value;
          onChanged();
        } else {
          assignedNodeIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      public Builder setAssignedNodeId(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
        if (assignedNodeIdBuilder_ == null) {
          assignedNodeId_ = builderForValue.build();
          onChanged();
        } else {
          assignedNodeIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      public Builder mergeAssignedNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        // Merge with any existing non-default value; otherwise replace.
        if (assignedNodeIdBuilder_ == null) {
          if (((bitField0_ & 0x00000004) != 0) &&
              assignedNodeId_ != null &&
              assignedNodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) {
            assignedNodeId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.newBuilder(assignedNodeId_).mergeFrom(value).buildPartial();
          } else {
            assignedNodeId_ = value;
          }
          onChanged();
        } else {
          assignedNodeIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      public Builder clearAssignedNodeId() {
        if (assignedNodeIdBuilder_ == null) {
          assignedNodeId_ = null;
          onChanged();
        } else {
          assignedNodeIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getAssignedNodeIdBuilder() {
        // Accessing the nested builder marks the field present.
        bitField0_ |= 0x00000004;
        onChanged();
        return getAssignedNodeIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getAssignedNodeIdOrBuilder() {
        if (assignedNodeIdBuilder_ != null) {
          return assignedNodeIdBuilder_.getMessageOrBuilder();
        } else {
          return assignedNodeId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : assignedNodeId_;
        }
      }
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> 
          getAssignedNodeIdFieldBuilder() {
        // Lazily creates the field builder, seeded from the current value;
        // ownership moves to the builder (plain field nulled).
        if (assignedNodeIdBuilder_ == null) {
          assignedNodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>(
                  getAssignedNodeId(),
                  getParentForChildren(),
                  isClean());
          assignedNodeId_ = null;
        }
        return assignedNodeIdBuilder_;
      }

      // priority: same plain-field-or-lazy-builder storage pattern; presence
      // bit 0x00000008.
      private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_;
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      public boolean hasPriority() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
        // Never returns null: falls back to the default instance when unset.
        if (priorityBuilder_ == null) {
          return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        } else {
          return priorityBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      public Builder setPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          priority_ = value;
          onChanged();
        } else {
          priorityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      public Builder setPriority(
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) {
        if (priorityBuilder_ == null) {
          priority_ = builderForValue.build();
          onChanged();
        } else {
          priorityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      public Builder mergePriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        // Merge with any existing non-default value; otherwise replace.
        if (priorityBuilder_ == null) {
          if (((bitField0_ & 0x00000008) != 0) &&
              priority_ != null &&
              priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) {
            priority_ =
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.newBuilder(priority_).mergeFrom(value).buildPartial();
          } else {
            priority_ = value;
          }
          onChanged();
        } else {
          priorityBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      public Builder clearPriority() {
        if (priorityBuilder_ == null) {
          priority_ = null;
          onChanged();
        } else {
          priorityBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() {
        // Accessing the nested builder marks the field present.
        bitField0_ |= 0x00000008;
        onChanged();
        return getPriorityFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
        if (priorityBuilder_ != null) {
          return priorityBuilder_.getMessageOrBuilder();
        } else {
          return priority_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        }
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> 
          getPriorityFieldBuilder() {
        // Lazily creates the field builder, seeded from the current value;
        // ownership moves to the builder (plain field nulled).
        if (priorityBuilder_ == null) {
          priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>(
                  getPriority(),
                  getParentForChildren(),
                  isClean());
          priority_ = null;
        }
        return priorityBuilder_;
      }

      // start_time scalar; presence tracked by bit 0x00000010.
      private long startTime_ ;
      /**
       * optional int64 start_time = 5;
       */
      public boolean hasStartTime() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * optional int64 start_time = 5;
       */
      public long getStartTime() {
        return startTime_;
      }
      /**
       * optional int64 start_time = 5;
       */
      public Builder setStartTime(long value) {
        bitField0_ |= 0x00000010;
        startTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 start_time = 5;
       */
      public Builder clearStartTime() {
        bitField0_ = (bitField0_ & ~0x00000010);
        startTime_ = 0L;
        onChanged();
        return this;
      }

      // Backing store for the optional int64 finish_time field (number 6).
      private long finishTime_;
      /**
       * optional int64 finish_time = 6;
       *
       * @return whether finish_time has been explicitly set
       */
      public boolean hasFinishTime() {
        return (bitField0_ & 0x00000020) != 0;
      }
      /**
       * optional int64 finish_time = 6;
       *
       * @return the finish time, or 0 when unset
       */
      public long getFinishTime() {
        return finishTime_;
      }
      /**
       * optional int64 finish_time = 6;
       *
       * @param value the finish time to record
       * @return this builder, for chaining
       */
      public Builder setFinishTime(long value) {
        finishTime_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * optional int64 finish_time = 6;
       *
       * @return this builder, for chaining
       */
      public Builder clearFinishTime() {
        finishTime_ = 0L;
        bitField0_ &= ~0x00000020;
        onChanged();
        return this;
      }

      // Lazily-converted storage for diagnostics_info: holds either a String
      // or a protobuf ByteString, converted on demand and cached.
      private java.lang.Object diagnosticsInfo_ = "";
      /**
       * optional string diagnostics_info = 7;
       */
      public boolean hasDiagnosticsInfo() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * optional string diagnostics_info = 7;
       */
      public java.lang.String getDiagnosticsInfo() {
        java.lang.Object ref = diagnosticsInfo_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          // Cache the decoded String only when the bytes are valid UTF-8.
          if (bs.isValidUtf8()) {
            diagnosticsInfo_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string diagnostics_info = 7;
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDiagnosticsInfoBytes() {
        java.lang.Object ref = diagnosticsInfo_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          // Cache the encoded ByteString for subsequent calls.
          diagnosticsInfo_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string diagnostics_info = 7;
       */
      public Builder setDiagnosticsInfo(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000040;
        diagnosticsInfo_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string diagnostics_info = 7;
       *
       * Resets diagnostics_info to its default ("") and clears its presence bit.
       */
      public Builder clearDiagnosticsInfo() {
        bitField0_ = (bitField0_ & ~0x00000040);
        diagnosticsInfo_ = getDefaultInstance().getDiagnosticsInfo();
        onChanged();
        return this;
      }
      /**
       * optional string diagnostics_info = 7;
       */
      public Builder setDiagnosticsInfoBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000040;
        diagnosticsInfo_ = value;
        onChanged();
        return this;
      }

      // Backing store for the optional int32 container_exit_status field (number 8).
      private int containerExitStatus_;
      /**
       * optional int32 container_exit_status = 8;
       *
       * @return whether container_exit_status has been explicitly set
       */
      public boolean hasContainerExitStatus() {
        return (bitField0_ & 0x00000080) != 0;
      }
      /**
       * optional int32 container_exit_status = 8;
       *
       * @return the exit status, or 0 when unset
       */
      public int getContainerExitStatus() {
        return containerExitStatus_;
      }
      /**
       * optional int32 container_exit_status = 8;
       *
       * @param value the exit status to record
       * @return this builder, for chaining
       */
      public Builder setContainerExitStatus(int value) {
        containerExitStatus_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * optional int32 container_exit_status = 8;
       *
       * @return this builder, for chaining
       */
      public Builder clearContainerExitStatus() {
        containerExitStatus_ = 0;
        bitField0_ &= ~0x00000080;
        onChanged();
        return this;
      }

      // Stored as the enum's wire number; 1 corresponds to ContainerStateProto.C_NEW.
      private int containerState_ = 1;
      /**
       * optional .hadoop.yarn.ContainerStateProto container_state = 9;
       */
      public boolean hasContainerState() {
        return ((bitField0_ & 0x00000100) != 0);
      }
      /**
       * optional .hadoop.yarn.ContainerStateProto container_state = 9;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getContainerState() {
        @SuppressWarnings("deprecation")
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.valueOf(containerState_);
        // An unrecognized numeric value falls back to C_NEW instead of null.
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.C_NEW : result;
      }
      /**
       * optional .hadoop.yarn.ContainerStateProto container_state = 9;
       *
       * @throws NullPointerException if {@code value} is null
       */
      public Builder setContainerState(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000100;
        containerState_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerStateProto container_state = 9;
       *
       * Resets container_state to its default numeric value (1).
       */
      public Builder clearContainerState() {
        bitField0_ = (bitField0_ & ~0x00000100);
        containerState_ = 1;
        onChanged();
        return this;
      }
      /** Replaces the builder's unknown-field set verbatim; delegates to the superclass. */
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      /** Merges additional unknown fields into the builder; delegates to the superclass. */
      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerHistoryDataProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerHistoryDataProto)
    // Shared immutable default instance; also the prototype used by newBuilder().
    private static final org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto();
    }

    /** Returns the singleton default (all-fields-unset) instance. */
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    /**
     * Parser for {@code ContainerHistoryDataProto}. Retained as a public field
     * for backwards compatibility; prefer {@link #parser()}.
     *
     * The {@code <ContainerHistoryDataProto>} type arguments (dropped in this
     * copy of the file) are restored so the field is not a raw type.
     */
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerHistoryDataProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerHistoryDataProto>() {
      @java.lang.Override
      public ContainerHistoryDataProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        return new ContainerHistoryDataProto(input, extensionRegistry);
      }
    };

    /** Returns the type-safe parser for this message type. */
    public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerHistoryDataProto> parser() {
      return PARSER;
    }

    /** Covariant override returning this message type's parser. */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerHistoryDataProto> getParserForType() {
      return PARSER;
    }

    /** Returns the singleton default instance for this message type. */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerHistoryDataProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  /**
   * Accessor interface for {@code hadoop.yarn.ContainerStartDataProto},
   * implemented by both the generated message and its Builder. For each
   * optional field it exposes a presence check ({@code hasX}) and a getter;
   * message-typed fields additionally expose an {@code getXOrBuilder} view.
   */
  public interface ContainerStartDataProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerStartDataProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     */
    boolean hasContainerId();
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();

    /**
     * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
     */
    boolean hasAllocatedResource();
    /**
     * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getAllocatedResource();
    /**
     * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getAllocatedResourceOrBuilder();

    /**
     * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
     */
    boolean hasAssignedNodeId();
    /**
     * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getAssignedNodeId();
    /**
     * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getAssignedNodeIdOrBuilder();

    /**
     * optional .hadoop.yarn.PriorityProto priority = 4;
     */
    boolean hasPriority();
    /**
     * optional .hadoop.yarn.PriorityProto priority = 4;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority();
    /**
     * optional .hadoop.yarn.PriorityProto priority = 4;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder();

    /**
     * optional int64 start_time = 5;
     */
    boolean hasStartTime();
    /**
     * optional int64 start_time = 5;
     */
    long getStartTime();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ContainerStartDataProto}
   */
  public  static final class ContainerStartDataProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerStartDataProto)
      ContainerStartDataProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use ContainerStartDataProto.newBuilder() to construct.
    // The Builder<?> wildcard (dropped in this copy of the file) is restored
    // so the parameter is not a raw type.
    private ContainerStartDataProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    /** No-arg constructor producing an instance with every field unset. */
    private ContainerStartDataProto() {
    }

    /** Returns the fields that were present on the wire but not in the schema. */
    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parses a serialized {@code ContainerStartDataProto} from {@code input}.
     * Fields not in the schema are preserved in the unknown-field set.
     */
    private ContainerStartDataProto(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            case 10: {
              // Field 1: container_id. If already present, merge into the
              // previously read value per proto2 merge semantics.
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) != 0)) {
                subBuilder = containerId_.toBuilder();
              }
              containerId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(containerId_);
                containerId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // Field 2: allocated_resource.
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) != 0)) {
                subBuilder = allocatedResource_.toBuilder();
              }
              allocatedResource_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(allocatedResource_);
                allocatedResource_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
            case 26: {
              // Field 3: assigned_node_id.
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000004) != 0)) {
                subBuilder = assignedNodeId_.toBuilder();
              }
              assignedNodeId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(assignedNodeId_);
                assignedNodeId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000004;
              break;
            }
            case 34: {
              // Field 4: priority.
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000008) != 0)) {
                subBuilder = priority_.toBuilder();
              }
              priority_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(priority_);
                priority_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000008;
              break;
            }
            case 40: {
              // Field 5: start_time (varint int64).
              bitField0_ |= 0x00000010;
              startTime_ = input.readInt64();
              break;
            }
            default: {
              // Unrecognized tag: stash it in unknownFields, or stop at a
              // group-end tag.
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was read, even on error, so the partial
        // message attached to the exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the protobuf descriptor for {@code ContainerStartDataProto}. */
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ContainerStartDataProto_descriptor;
    }

    /** Wires reflective field access to this class and its Builder. */
    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ContainerStartDataProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto.Builder.class);
    }

    // Presence bits: 0x1=container_id, 0x2=allocated_resource,
    // 0x4=assigned_node_id, 0x8=priority, 0x10=start_time.
    private int bitField0_;
    public static final int CONTAINER_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     */
    public boolean hasContainerId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     *
     * Returns the default instance (never null) when the field is unset.
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }

    public static final int ALLOCATED_RESOURCE_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto allocatedResource_;
    /**
     * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
     *
     * @return whether allocated_resource was present on the wire
     */
    public boolean hasAllocatedResource() {
      return (bitField0_ & 0x00000002) != 0;
    }
    /**
     * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
     *
     * @return the resource, or the type's default instance when unset
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getAllocatedResource() {
      if (allocatedResource_ != null) {
        return allocatedResource_;
      }
      return org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance();
    }
    /**
     * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
     *
     * @return a read-only view of the resource, or its default when unset
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getAllocatedResourceOrBuilder() {
      if (allocatedResource_ != null) {
        return allocatedResource_;
      }
      return org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance();
    }

    public static final int ASSIGNED_NODE_ID_FIELD_NUMBER = 3;
    private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto assignedNodeId_;
    /**
     * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
     *
     * @return whether assigned_node_id was present on the wire
     */
    public boolean hasAssignedNodeId() {
      return (bitField0_ & 0x00000004) != 0;
    }
    /**
     * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
     *
     * @return the node id, or the type's default instance when unset
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getAssignedNodeId() {
      org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto current = assignedNodeId_;
      return current != null ? current : org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance();
    }
    /**
     * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
     *
     * @return a read-only view of the node id, or its default when unset
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getAssignedNodeIdOrBuilder() {
      org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto current = assignedNodeId_;
      return current != null ? current : org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance();
    }

    public static final int PRIORITY_FIELD_NUMBER = 4;
    private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
    /**
     * optional .hadoop.yarn.PriorityProto priority = 4;
     */
    public boolean hasPriority() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * optional .hadoop.yarn.PriorityProto priority = 4;
     *
     * Returns the default instance (never null) when the field is unset.
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }
    /**
     * optional .hadoop.yarn.PriorityProto priority = 4;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }

    public static final int START_TIME_FIELD_NUMBER = 5;
    private long startTime_;
    /**
     * optional int64 start_time = 5;
     *
     * @return whether start_time was present on the wire
     */
    public boolean hasStartTime() {
      return (bitField0_ & 0x00000010) != 0;
    }
    /**
     * optional int64 start_time = 5;
     *
     * @return the start time, or 0 when unset
     */
    public long getStartTime() {
      return startTime_;
    }

    // Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    /**
     * Checks (and memoizes) whether all initialization constraints hold;
     * here only allocated_resource, when set, must itself be initialized.
     */
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasAllocatedResource()) {
        if (!getAllocatedResource().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    /**
     * Serializes each set field in field-number order, then any unknown fields.
     */
    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getContainerId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getAllocatedResource());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeMessage(3, getAssignedNodeId());
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeMessage(4, getPriority());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeInt64(5, startTime_);
      }
      unknownFields.writeTo(output);
    }

    /**
     * Computes (and memoizes in {@code memoizedSize}) the serialized byte size
     * of the set fields plus unknown fields.
     */
    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getContainerId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getAllocatedResource());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(3, getAssignedNodeId());
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(4, getPriority());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(5, startTime_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    /**
     * Field-by-field equality: presence bits, each set field's value, and the
     * unknown-field set must all match.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto other = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto) obj;

      if (hasContainerId() != other.hasContainerId()) return false;
      if (hasContainerId()) {
        if (!getContainerId()
            .equals(other.getContainerId())) return false;
      }
      if (hasAllocatedResource() != other.hasAllocatedResource()) return false;
      if (hasAllocatedResource()) {
        if (!getAllocatedResource()
            .equals(other.getAllocatedResource())) return false;
      }
      if (hasAssignedNodeId() != other.hasAssignedNodeId()) return false;
      if (hasAssignedNodeId()) {
        if (!getAssignedNodeId()
            .equals(other.getAssignedNodeId())) return false;
      }
      if (hasPriority() != other.hasPriority()) return false;
      if (hasPriority()) {
        if (!getPriority()
            .equals(other.getPriority())) return false;
      }
      if (hasStartTime() != other.hasStartTime()) return false;
      if (hasStartTime()) {
        if (getStartTime()
            != other.getStartTime()) return false;
      }
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }

    /**
     * Hash over the descriptor, each set field (keyed by field number), and the
     * unknown fields; memoized in {@code memoizedHashCode}. Consistent with
     * {@link #equals(java.lang.Object)}.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasContainerId()) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerId().hashCode();
      }
      if (hasAllocatedResource()) {
        hash = (37 * hash) + ALLOCATED_RESOURCE_FIELD_NUMBER;
        hash = (53 * hash) + getAllocatedResource().hashCode();
      }
      if (hasAssignedNodeId()) {
        hash = (37 * hash) + ASSIGNED_NODE_ID_FIELD_NUMBER;
        hash = (53 * hash) + getAssignedNodeId().hashCode();
      }
      if (hasPriority()) {
        hash = (37 * hash) + PRIORITY_FIELD_NUMBER;
        hash = (53 * hash) + getPriority().hashCode();
      }
      if (hasStartTime()) {
        hash = (37 * hash) + START_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getStartTime());
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // ---------------------------------------------------------------------
    // Static parse helpers: thin wrappers over PARSER for the common input
    // kinds (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream),
    // each with and without an ExtensionRegistryLite.
    // ---------------------------------------------------------------------
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    // Delimited variants read a length-prefixed message from the stream.
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    /** Returns a fresh, empty builder for this message type. */
    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    /** Returns a builder pre-populated from {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      // Skip the mergeFrom when this is the pristine default instance.
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    /** Creates a builder registered with {@code parent} for change notification. */
    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ContainerStartDataProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerStartDataProto)
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProtoOrBuilder {
      /** Returns the protobuf descriptor for {@code ContainerStartDataProto}. */
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ContainerStartDataProto_descriptor;
      }

      /** Wires reflective field access to the message class and this Builder. */
      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ContainerStartDataProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested-field builders when the runtime's
      // alwaysUseFieldBuilders flag is set.
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getContainerIdFieldBuilder();
          getAllocatedResourceFieldBuilder();
          getAssignedNodeIdFieldBuilder();
          getPriorityFieldBuilder();
        }
      }
      /**
       * Resets every field to its default and clears all presence bits.
       * Message fields are cleared via their nested builder when one exists.
       */
      @java.lang.Override
      public Builder clear() {
        super.clear();
        if (containerIdBuilder_ == null) {
          containerId_ = null;
        } else {
          containerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (allocatedResourceBuilder_ == null) {
          allocatedResource_ = null;
        } else {
          allocatedResourceBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        if (assignedNodeIdBuilder_ == null) {
          assignedNodeId_ = null;
        } else {
          assignedNodeIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        if (priorityBuilder_ == null) {
          priority_ = null;
        } else {
          priorityBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        startTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }

      // Descriptor for the message type this builder produces.
      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ContainerStartDataProto_descriptor;
      }

      // Singleton default (all-fields-unset) instance of the message type.
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto.getDefaultInstance();
      }

      // Builds the message, throwing if any required sub-message fields
      // (see isInitialized()) are missing or incomplete.
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto build() {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Builds the message without checking that required fields are set.
      // Copies each field whose has-bit is set (taking the built value from
      // the field builder when one exists) and transfers the has-bits.
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto buildPartial() {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto result = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          if (containerIdBuilder_ == null) {
            result.containerId_ = containerId_;
          } else {
            result.containerId_ = containerIdBuilder_.build();
          }
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          if (allocatedResourceBuilder_ == null) {
            result.allocatedResource_ = allocatedResource_;
          } else {
            result.allocatedResource_ = allocatedResourceBuilder_.build();
          }
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          if (assignedNodeIdBuilder_ == null) {
            result.assignedNodeId_ = assignedNodeId_;
          } else {
            result.assignedNodeId_ = assignedNodeIdBuilder_.build();
          }
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          if (priorityBuilder_ == null) {
            result.priority_ = priority_;
          } else {
            result.priority_ = priorityBuilder_.build();
          }
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.startTime_ = startTime_;
          to_bitField0_ |= 0x00000010;
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Boilerplate overrides that delegate to GeneratedMessageV3.Builder so
      // the reflective Message.Builder API keeps working on this subclass.
      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      // Dispatches to the type-specific merge when possible; otherwise falls
      // back to the reflective (descriptor-driven) merge in the superclass.
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges all set fields of 'other' into this builder: message fields
      // are recursively merged, scalar fields are overwritten. Merging the
      // default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto other) {
        if (other == org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto.getDefaultInstance()) return this;
        if (other.hasContainerId()) {
          mergeContainerId(other.getContainerId());
        }
        if (other.hasAllocatedResource()) {
          mergeAllocatedResource(other.getAllocatedResource());
        }
        if (other.hasAssignedNodeId()) {
          mergeAssignedNodeId(other.getAssignedNodeId());
        }
        if (other.hasPriority()) {
          setStartTime(other.getStartTime()) /* placeholder comment removed */;
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      // A ContainerStartDataProto is initialized unless an allocated_resource
      // is present but itself uninitialized (ResourceProto carries required
      // constraints); all other fields here are optional.
      @java.lang.Override
      public final boolean isInitialized() {
        if (hasAllocatedResource()) {
          if (!getAllocatedResource().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      // Parses a message from the wire and merges it into this builder.
      // On InvalidProtocolBufferException the partially parsed message is
      // still merged (in the finally block) before the exception is
      // rethrown as an IOException.
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Has-bits: bit 0 = container_id, bit 1 = allocated_resource,
      // bit 2 = assigned_node_id, bit 3 = priority, bit 4 = start_time.
      private int bitField0_;

      // ---- Field 1: optional .hadoop.yarn.ContainerIdProto container_id ----
      // Either containerId_ holds the value directly, or containerIdBuilder_
      // (once created) owns it; the two are never both authoritative.
      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public boolean hasContainerId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
        if (containerIdBuilder_ == null) {
          return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        } else {
          return containerIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerId_ = value;
          onChanged();
        } else {
          containerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public Builder setContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          containerId_ = builderForValue.build();
          onChanged();
        } else {
          containerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          // Merge into the existing value only if one is already set and it
          // is not the shared default instance; otherwise just adopt 'value'.
          if (((bitField0_ & 0x00000001) != 0) &&
              containerId_ != null &&
              containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            containerId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.newBuilder(containerId_).mergeFrom(value).buildPartial();
          } else {
            containerId_ = value;
          }
          onChanged();
        } else {
          containerIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public Builder clearContainerId() {
        if (containerIdBuilder_ == null) {
          containerId_ = null;
          onChanged();
        } else {
          containerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
        // Exposing the nested builder counts as setting the field.
        bitField0_ |= 0x00000001;
        onChanged();
        return getContainerIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilder();
        } else {
          return containerId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        }
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getContainerIdFieldBuilder() {
        // Lazily creates the field builder; ownership of the current value
        // transfers to it, so the direct reference is nulled out.
        if (containerIdBuilder_ == null) {
          containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getContainerId(),
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }

      // ---- Field 2: optional .hadoop.yarn.ResourceProto allocated_resource ----
      // Same single-field-builder pattern as container_id above.
      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto allocatedResource_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> allocatedResourceBuilder_;
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      public boolean hasAllocatedResource() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getAllocatedResource() {
        if (allocatedResourceBuilder_ == null) {
          return allocatedResource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : allocatedResource_;
        } else {
          return allocatedResourceBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      public Builder setAllocatedResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (allocatedResourceBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          allocatedResource_ = value;
          onChanged();
        } else {
          allocatedResourceBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      public Builder setAllocatedResource(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (allocatedResourceBuilder_ == null) {
          allocatedResource_ = builderForValue.build();
          onChanged();
        } else {
          allocatedResourceBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      public Builder mergeAllocatedResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (allocatedResourceBuilder_ == null) {
          // Merge into the existing value unless it is unset or the shared
          // default instance, in which case 'value' replaces it outright.
          if (((bitField0_ & 0x00000002) != 0) &&
              allocatedResource_ != null &&
              allocatedResource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            allocatedResource_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.newBuilder(allocatedResource_).mergeFrom(value).buildPartial();
          } else {
            allocatedResource_ = value;
          }
          onChanged();
        } else {
          allocatedResourceBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      public Builder clearAllocatedResource() {
        if (allocatedResourceBuilder_ == null) {
          allocatedResource_ = null;
          onChanged();
        } else {
          allocatedResourceBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getAllocatedResourceBuilder() {
        // Exposing the nested builder counts as setting the field.
        bitField0_ |= 0x00000002;
        onChanged();
        return getAllocatedResourceFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getAllocatedResourceOrBuilder() {
        if (allocatedResourceBuilder_ != null) {
          return allocatedResourceBuilder_.getMessageOrBuilder();
        } else {
          return allocatedResource_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : allocatedResource_;
        }
      }
      /**
       * optional .hadoop.yarn.ResourceProto allocated_resource = 2;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getAllocatedResourceFieldBuilder() {
        // Lazily creates the field builder and hands it the current value.
        if (allocatedResourceBuilder_ == null) {
          allocatedResourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getAllocatedResource(),
                  getParentForChildren(),
                  isClean());
          allocatedResource_ = null;
        }
        return allocatedResourceBuilder_;
      }

      // ---- Field 3: optional .hadoop.yarn.NodeIdProto assigned_node_id ----
      // Same single-field-builder pattern as container_id above.
      private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto assignedNodeId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> assignedNodeIdBuilder_;
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      public boolean hasAssignedNodeId() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getAssignedNodeId() {
        if (assignedNodeIdBuilder_ == null) {
          return assignedNodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : assignedNodeId_;
        } else {
          return assignedNodeIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      public Builder setAssignedNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (assignedNodeIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          assignedNodeId_ = value;
          onChanged();
        } else {
          assignedNodeIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      public Builder setAssignedNodeId(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
        if (assignedNodeIdBuilder_ == null) {
          assignedNodeId_ = builderForValue.build();
          onChanged();
        } else {
          assignedNodeIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      public Builder mergeAssignedNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (assignedNodeIdBuilder_ == null) {
          // Merge into the existing value unless it is unset or the shared
          // default instance, in which case 'value' replaces it outright.
          if (((bitField0_ & 0x00000004) != 0) &&
              assignedNodeId_ != null &&
              assignedNodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) {
            assignedNodeId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.newBuilder(assignedNodeId_).mergeFrom(value).buildPartial();
          } else {
            assignedNodeId_ = value;
          }
          onChanged();
        } else {
          assignedNodeIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      public Builder clearAssignedNodeId() {
        if (assignedNodeIdBuilder_ == null) {
          assignedNodeId_ = null;
          onChanged();
        } else {
          assignedNodeIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getAssignedNodeIdBuilder() {
        // Exposing the nested builder counts as setting the field.
        bitField0_ |= 0x00000004;
        onChanged();
        return getAssignedNodeIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getAssignedNodeIdOrBuilder() {
        if (assignedNodeIdBuilder_ != null) {
          return assignedNodeIdBuilder_.getMessageOrBuilder();
        } else {
          return assignedNodeId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : assignedNodeId_;
        }
      }
      /**
       * optional .hadoop.yarn.NodeIdProto assigned_node_id = 3;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> 
          getAssignedNodeIdFieldBuilder() {
        // Lazily creates the field builder and hands it the current value.
        if (assignedNodeIdBuilder_ == null) {
          assignedNodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>(
                  getAssignedNodeId(),
                  getParentForChildren(),
                  isClean());
          assignedNodeId_ = null;
        }
        return assignedNodeIdBuilder_;
      }

      // ---- Field 4: optional .hadoop.yarn.PriorityProto priority ----
      // Same single-field-builder pattern as container_id above.
      private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_;
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      public boolean hasPriority() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
        if (priorityBuilder_ == null) {
          return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        } else {
          return priorityBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      public Builder setPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          priority_ = value;
          onChanged();
        } else {
          priorityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      public Builder setPriority(
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) {
        if (priorityBuilder_ == null) {
          priority_ = builderForValue.build();
          onChanged();
        } else {
          priorityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      public Builder mergePriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          // Merge into the existing value unless it is unset or the shared
          // default instance, in which case 'value' replaces it outright.
          if (((bitField0_ & 0x00000008) != 0) &&
              priority_ != null &&
              priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) {
            priority_ =
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.newBuilder(priority_).mergeFrom(value).buildPartial();
          } else {
            priority_ = value;
          }
          onChanged();
        } else {
          priorityBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      public Builder clearPriority() {
        if (priorityBuilder_ == null) {
          priority_ = null;
          onChanged();
        } else {
          priorityBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() {
        // Exposing the nested builder counts as setting the field.
        bitField0_ |= 0x00000008;
        onChanged();
        return getPriorityFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
        if (priorityBuilder_ != null) {
          return priorityBuilder_.getMessageOrBuilder();
        } else {
          return priority_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        }
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 4;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> 
          getPriorityFieldBuilder() {
        // Lazily creates the field builder and hands it the current value.
        if (priorityBuilder_ == null) {
          priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>(
                  getPriority(),
                  getParentForChildren(),
                  isClean());
          priority_ = null;
        }
        return priorityBuilder_;
      }

      // ---- Field 5: optional int64 start_time ----
      // Scalar field: tracked directly with has-bit 0x10, no field builder.
      private long startTime_ ;
      /**
       * optional int64 start_time = 5;
       */
      public boolean hasStartTime() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * optional int64 start_time = 5;
       */
      public long getStartTime() {
        return startTime_;
      }
      /**
       * optional int64 start_time = 5;
       */
      public Builder setStartTime(long value) {
        bitField0_ |= 0x00000010;
        startTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 start_time = 5;
       */
      public Builder clearStartTime() {
        bitField0_ = (bitField0_ & ~0x00000010);
        startTime_ = 0L;
        onChanged();
        return this;
      }
      // Boilerplate delegation for unknown-field handling; unknown fields
      // are preserved so unrecognized wire data round-trips safely.
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerStartDataProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerStartDataProto)
    // Shared immutable default (all-fields-unset) instance, created once at
    // class-load time and returned by getDefaultInstance().
    private static final org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto();
    }

    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    // Wire parser for ContainerStartDataProto; delegates to the
    // stream-parsing constructor. Deprecated in favor of parser().
    // NOTE(review): the generic type arguments on Parser/AbstractParser
    // appear to have been stripped (likely HTML <...> removal in this copy);
    // confirm against the protoc-generated original before editing.
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
      @java.lang.Override
      public ContainerStartDataProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        return new ContainerStartDataProto(input, extensionRegistry);
      }
    };

    // Preferred accessor for the message parser.
    public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
      return PARSER;
    }

    // Instance-level access to the shared default instance.
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  // Read-only accessor interface implemented by both ContainerFinishDataProto
  // and its Builder; one has/get pair per optional proto field.
  public interface ContainerFinishDataProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerFinishDataProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     */
    boolean hasContainerId();
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();

    /**
     * optional int64 finish_time = 2;
     */
    boolean hasFinishTime();
    /**
     * optional int64 finish_time = 2;
     */
    long getFinishTime();

    /**
     * optional string diagnostics_info = 3;
     */
    boolean hasDiagnosticsInfo();
    /**
     * optional string diagnostics_info = 3;
     */
    java.lang.String getDiagnosticsInfo();
    /**
     * optional string diagnostics_info = 3;
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsInfoBytes();

    /**
     * optional int32 container_exit_status = 4;
     */
    boolean hasContainerExitStatus();
    /**
     * optional int32 container_exit_status = 4;
     */
    int getContainerExitStatus();

    /**
     * optional .hadoop.yarn.ContainerStateProto container_state = 5;
     */
    boolean hasContainerState();
    /**
     * optional .hadoop.yarn.ContainerStateProto container_state = 5;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getContainerState();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ContainerFinishDataProto}
   */
  public  static final class ContainerFinishDataProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerFinishDataProto)
      ContainerFinishDataProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use ContainerFinishDataProto.newBuilder() to construct.
    private ContainerFinishDataProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) {
      super(builder);
    }
    private ContainerFinishDataProto() {
      diagnosticsInfo_ = "";
      containerState_ = 1;
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs from |input|
    // until end of stream (tag 0), storing each recognized field and marking
    // its presence bit in bitField0_.
    private ContainerFinishDataProto(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: { // field 1 (container_id), length-delimited message
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) != 0)) {
                // Field seen before: merge the new occurrence into the old
                // one, matching protobuf merge semantics for message fields.
                subBuilder = containerId_.toBuilder();
              }
              containerId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(containerId_);
                containerId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: { // field 2 (finish_time), varint
              bitField0_ |= 0x00000002;
              finishTime_ = input.readInt64();
              break;
            }
            case 26: { // field 3 (diagnostics_info), length-delimited bytes;
              // stored as a ByteString and lazily converted by the getter.
              org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000004;
              diagnosticsInfo_ = bs;
              break;
            }
            case 32: { // field 4 (container_exit_status), varint
              bitField0_ |= 0x00000008;
              containerExitStatus_ = input.readInt32();
              break;
            }
            case 40: { // field 5 (container_state), enum varint
              int rawValue = input.readEnum();
                @SuppressWarnings("deprecation")
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto value = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.valueOf(rawValue);
              if (value == null) {
                // Unrecognized enum number: preserve it as an unknown field so
                // it round-trips rather than being silently dropped.
                unknownFields.mergeVarintField(5, rawValue);
              } else {
                bitField0_ |= 0x00000010;
                containerState_ = rawValue;
              }
              break;
            }
            default: {
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        // Always freeze what was parsed, even on error, so the partial message
        // attached to the exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection support: descriptor for hadoop.yarn.ContainerFinishDataProto.
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ContainerFinishDataProto_descriptor;
    }

    // Binds the descriptor's fields to this class's accessors for the
    // reflective GeneratedMessageV3 machinery.
    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ContainerFinishDataProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto.Builder.class);
    }

    // Presence bitmap: bit 0x1 = container_id, 0x2 = finish_time,
    // 0x4 = diagnostics_info, 0x8 = container_exit_status,
    // 0x10 = container_state.
    private int bitField0_;
    public static final int CONTAINER_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     */
    public boolean hasContainerId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
      // Never returns null: falls back to the default instance when unset.
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }

    public static final int FINISH_TIME_FIELD_NUMBER = 2;
    private long finishTime_;
    /**
     * optional int64 finish_time = 2;
     */
    public boolean hasFinishTime() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * optional int64 finish_time = 2;
     */
    public long getFinishTime() {
      return finishTime_;
    }

    public static final int DIAGNOSTICS_INFO_FIELD_NUMBER = 3;
    // Holds either a String or a ByteString; parsed bytes are lazily decoded
    // to a String on first read. volatile so the cached decode publishes
    // safely across threads.
    private volatile java.lang.Object diagnosticsInfo_;
    /**
     * optional string diagnostics_info = 3;
     */
    public boolean hasDiagnosticsInfo() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * optional string diagnostics_info = 3;
     */
    public java.lang.String getDiagnosticsInfo() {
      java.lang.Object ref = diagnosticsInfo_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes are valid UTF-8, so a
        // malformed payload keeps round-tripping via getDiagnosticsInfoBytes().
        if (bs.isValidUtf8()) {
          diagnosticsInfo_ = s;
        }
        return s;
      }
    }
    /**
     * optional string diagnostics_info = 3;
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsInfoBytes() {
      java.lang.Object ref = diagnosticsInfo_;
      if (ref instanceof java.lang.String) {
        // Cache the UTF-8 encoding for subsequent byte-level reads.
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        diagnosticsInfo_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int CONTAINER_EXIT_STATUS_FIELD_NUMBER = 4;
    private int containerExitStatus_;
    /**
     * optional int32 container_exit_status = 4;
     */
    public boolean hasContainerExitStatus() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * optional int32 container_exit_status = 4;
     */
    public int getContainerExitStatus() {
      return containerExitStatus_;
    }

    public static final int CONTAINER_STATE_FIELD_NUMBER = 5;
    // Stored as the raw enum wire number, not the enum constant.
    private int containerState_;
    /**
     * optional .hadoop.yarn.ContainerStateProto container_state = 5;
     */
    public boolean hasContainerState() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * optional .hadoop.yarn.ContainerStateProto container_state = 5;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getContainerState() {
      @SuppressWarnings("deprecation")
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.valueOf(containerState_);
      // Unknown numbers map to C_NEW rather than returning null.
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.C_NEW : result;
    }

    // Memoized initialization check: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      // All fields are optional, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    // Serializes only the fields whose presence bits are set, in field-number
    // order, then appends any preserved unknown fields.
    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getContainerId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(2, finishTime_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, diagnosticsInfo_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeInt32(4, containerExitStatus_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeEnum(5, containerState_);
      }
      unknownFields.writeTo(output);
    }

    // Computes (and memoizes) the exact serialized byte size; must mirror
    // writeTo field-for-field.
    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getContainerId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(2, finishTime_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, diagnosticsInfo_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(4, containerExitStatus_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(5, containerState_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    // Field-by-field equality: presence must match, and present values must
    // be equal; unknown fields participate too.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto other = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto) obj;

      if (hasContainerId() != other.hasContainerId()) return false;
      if (hasContainerId()) {
        if (!getContainerId()
            .equals(other.getContainerId())) return false;
      }
      if (hasFinishTime() != other.hasFinishTime()) return false;
      if (hasFinishTime()) {
        if (getFinishTime()
            != other.getFinishTime()) return false;
      }
      if (hasDiagnosticsInfo() != other.hasDiagnosticsInfo()) return false;
      if (hasDiagnosticsInfo()) {
        if (!getDiagnosticsInfo()
            .equals(other.getDiagnosticsInfo())) return false;
      }
      if (hasContainerExitStatus() != other.hasContainerExitStatus()) return false;
      if (hasContainerExitStatus()) {
        if (getContainerExitStatus()
            != other.getContainerExitStatus()) return false;
      }
      if (hasContainerState() != other.hasContainerState()) return false;
      if (hasContainerState()) {
        // Enum compared by raw wire number, so unknown values compare too.
        if (containerState_ != other.containerState_) return false;
      }
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }

    // Memoized hash over descriptor, set fields (keyed by field number), and
    // unknown fields; consistent with equals above.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasContainerId()) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerId().hashCode();
      }
      if (hasFinishTime()) {
        hash = (37 * hash) + FINISH_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getFinishTime());
      }
      if (hasDiagnosticsInfo()) {
        hash = (37 * hash) + DIAGNOSTICS_INFO_FIELD_NUMBER;
        hash = (53 * hash) + getDiagnosticsInfo().hashCode();
      }
      if (hasContainerExitStatus()) {
        hash = (37 * hash) + CONTAINER_EXIT_STATUS_FIELD_NUMBER;
        hash = (53 * hash) + getContainerExitStatus();
      }
      if (hasContainerState()) {
        hash = (37 * hash) + CONTAINER_STATE_FIELD_NUMBER;
        hash = (53 * hash) + containerState_;
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Standard generated parseFrom overloads: all delegate to PARSER (byte
    // sources) or to the GeneratedMessageV3 IO helpers (stream sources).
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    // Builder factories: a fresh builder is a clone of the default instance;
    // newBuilder(prototype) pre-merges an existing message's fields.
    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      // Avoids a needless mergeFrom when called on the default instance.
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ContainerFinishDataProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerFinishDataProto)
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProtoOrBuilder {
      // Reflection support for the Builder: same descriptor and accessor
      // table as the message class.
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ContainerFinishDataProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ContainerFinishDataProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto.class, org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Parent-linked constructor used for nested-builder change propagation.
      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested container_id field builder when the
      // runtime requires field builders (alwaysUseFieldBuilders).
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getContainerIdFieldBuilder();
        }
      }
      // Resets every field to its default and clears all presence bits.
      @java.lang.Override
      public Builder clear() {
        super.clear();
        if (containerIdBuilder_ == null) {
          containerId_ = null;
        } else {
          containerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        finishTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        diagnosticsInfo_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        containerState_ = 1;
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.internal_static_hadoop_yarn_ContainerFinishDataProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto.getDefaultInstance();
      }

      // Builds the message, throwing if required fields were missing (none
      // exist here, so this cannot throw in practice).
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto build() {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies the builder's state into a new immutable message. Note the
      // generated pattern: diagnosticsInfo_ and containerState_ are copied
      // unconditionally (safe — unset values hold their defaults) while
      // their presence bits are transferred only when set.
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto buildPartial() {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto result = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          if (containerIdBuilder_ == null) {
            result.containerId_ = containerId_;
          } else {
            result.containerId_ = containerIdBuilder_.build();
          }
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.finishTime_ = finishTime_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          to_bitField0_ |= 0x00000004;
        }
        result.diagnosticsInfo_ = diagnosticsInfo_;
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.containerExitStatus_ = containerExitStatus_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          to_bitField0_ |= 0x00000010;
        }
        result.containerState_ = containerState_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // The following overrides simply re-expose the GeneratedMessageV3
      // builder operations with this Builder as the return type.
      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      // Dispatches to the typed mergeFrom when possible, otherwise falls back
      // to reflective merging.
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Copies every set field of |other| into this builder; unset fields of
      // |other| leave this builder's values untouched.
      public Builder mergeFrom(org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto other) {
        if (other == org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto.getDefaultInstance()) return this;
        if (other.hasContainerId()) {
          mergeContainerId(other.getContainerId());
        }
        if (other.hasFinishTime()) {
          setFinishTime(other.getFinishTime());
        }
        if (other.hasDiagnosticsInfo()) {
          // Copies the raw String-or-ByteString value directly to avoid a
          // needless UTF-8 decode/encode round trip.
          bitField0_ |= 0x00000004;
          diagnosticsInfo_ = other.diagnosticsInfo_;
          onChanged();
        }
        if (other.hasContainerExitStatus()) {
          setContainerExitStatus(other.getContainerExitStatus());
        }
        if (other.hasContainerState()) {
          setContainerState(other.getContainerState());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      // No required fields, so a builder is always in a buildable state.
      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      // Parses from the wire and merges into this builder; on a parse error
      // the partially parsed message is still merged before rethrowing.
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Builder-side presence bitmap; same bit layout as the message class.
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
      // Lazily created nested builder for container_id; once created it owns
      // the field's state and containerId_ is nulled out.
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public boolean hasContainerId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
        if (containerIdBuilder_ == null) {
          return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        } else {
          return containerIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerId_ = value;
          onChanged();
        } else {
          containerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public Builder setContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          containerId_ = builderForValue.build();
          onChanged();
        } else {
          containerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          // Merge only when a non-default value is already present;
          // otherwise simply adopt |value|.
          if (((bitField0_ & 0x00000001) != 0) &&
              containerId_ != null &&
              containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            containerId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.newBuilder(containerId_).mergeFrom(value).buildPartial();
          } else {
            containerId_ = value;
          }
          onChanged();
        } else {
          containerIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public Builder clearContainerId() {
        if (containerIdBuilder_ == null) {
          containerId_ = null;
          onChanged();
        } else {
          containerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
        // Marks the field present: handing out a mutable builder implies
        // the caller intends to populate it.
        bitField0_ |= 0x00000001;
        onChanged();
        return getContainerIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilder();
        } else {
          return containerId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        }
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getContainerIdFieldBuilder() {
        // Lazily transfers ownership of the current value into a
        // SingleFieldBuilderV3 wired to this builder's change notifications.
        if (containerIdBuilder_ == null) {
          containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getContainerId(),
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }

      private long finishTime_ ;
      /**
       * optional int64 finish_time = 2;
       */
      public boolean hasFinishTime() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional int64 finish_time = 2;
       */
      public long getFinishTime() {
        return finishTime_;
      }
      /**
       * optional int64 finish_time = 2;
       */
      public Builder setFinishTime(long value) {
        bitField0_ |= 0x00000002;
        finishTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 finish_time = 2;
       */
      public Builder clearFinishTime() {
        bitField0_ = (bitField0_ & ~0x00000002);
        finishTime_ = 0L;
        onChanged();
        return this;
      }

      // String-or-ByteString holder, mirroring the message-side lazy
      // UTF-8 decode/encode caching.
      private java.lang.Object diagnosticsInfo_ = "";
      /**
       * optional string diagnostics_info = 3;
       */
      public boolean hasDiagnosticsInfo() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * optional string diagnostics_info = 3;
       */
      public java.lang.String getDiagnosticsInfo() {
        java.lang.Object ref = diagnosticsInfo_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          // Cache the decoded String only for valid UTF-8 so malformed bytes
          // still round-trip via getDiagnosticsInfoBytes().
          if (bs.isValidUtf8()) {
            diagnosticsInfo_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string diagnostics_info = 3;
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDiagnosticsInfoBytes() {
        java.lang.Object ref = diagnosticsInfo_;
        if (ref instanceof String) {
          // Cache the UTF-8 encoding for subsequent byte-level reads.
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          diagnosticsInfo_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string diagnostics_info = 3;
       */
      public Builder setDiagnosticsInfo(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        diagnosticsInfo_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string diagnostics_info = 3;
       */
      public Builder clearDiagnosticsInfo() {
        // Resets to the default instance's value (the empty string).
        bitField0_ = (bitField0_ & ~0x00000004);
        diagnosticsInfo_ = getDefaultInstance().getDiagnosticsInfo();
        onChanged();
        return this;
      }
      /**
       * optional string diagnostics_info = 3;
       */
      public Builder setDiagnosticsInfoBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        diagnosticsInfo_ = value;
        onChanged();
        return this;
      }

      // Backing value for optional int32 container_exit_status = 4; presence
      // is tracked by bit 0x00000008 of bitField0_.
      private int containerExitStatus_ ;
      /**
       * optional int32 container_exit_status = 4;
       * @return whether container_exit_status has been explicitly set.
       */
      public boolean hasContainerExitStatus() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * optional int32 container_exit_status = 4;
       * @return the current value (0 if the field was never set).
       */
      public int getContainerExitStatus() {
        return containerExitStatus_;
      }
      /**
       * optional int32 container_exit_status = 4;
       * Sets the value, marks the field present, and notifies parent builders.
       */
      public Builder setContainerExitStatus(int value) {
        bitField0_ |= 0x00000008;
        containerExitStatus_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 container_exit_status = 4;
       * Clears the presence bit and restores the proto default (0).
       */
      public Builder clearContainerExitStatus() {
        bitField0_ = (bitField0_ & ~0x00000008);
        containerExitStatus_ = 0;
        onChanged();
        return this;
      }

      // Backing value for optional .hadoop.yarn.ContainerStateProto
      // container_state = 5, stored by enum wire number (default 1). Presence
      // is tracked by bit 0x00000010 of bitField0_.
      private int containerState_ = 1;
      /**
       * optional .hadoop.yarn.ContainerStateProto container_state = 5;
       * @return whether container_state has been explicitly set.
       */
      public boolean hasContainerState() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * optional .hadoop.yarn.ContainerStateProto container_state = 5;
       * Maps the stored number back to the enum; an unrecognized number
       * falls back to C_NEW.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getContainerState() {
        @SuppressWarnings("deprecation")
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.valueOf(containerState_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.C_NEW : result;
      }
      /**
       * optional .hadoop.yarn.ContainerStateProto container_state = 5;
       * Stores the enum's wire number and marks the field present.
       * @throws NullPointerException if value is null.
       */
      public Builder setContainerState(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000010;
        containerState_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerStateProto container_state = 5;
       * Clears the presence bit and restores the default number (1).
       */
      public Builder clearContainerState() {
        bitField0_ = (bitField0_ & ~0x00000010);
        containerState_ = 1;
        onChanged();
        return this;
      }
      /** Replaces this builder's unknown-field set; delegates to the superclass. */
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      /** Merges unrecognized wire data into this builder; delegates to the superclass. */
      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerFinishDataProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerFinishDataProto)
    // Shared singleton instance for ContainerFinishDataProto, created once at
    // class initialization.
    private static final org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto();
    }

    /** @return the shared default instance of ContainerFinishDataProto. */
    public static org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    // Deprecated raw-typed parser retained for compatibility; parser() exposes
    // the same instance. Each parse invokes the message's stream constructor.
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
      @java.lang.Override
      public ContainerFinishDataProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        return new ContainerFinishDataProto(input, extensionRegistry);
      }
    };

    /** @return the parser for ContainerFinishDataProto messages. */
    public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
      return PARSER;
    }

    /** @return the parser for this message's type (same instance as parser()). */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
      return PARSER;
    }

    /** @return the shared default instance, typed for the MessageLite contract. */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  // Per-message descriptor and reflective field-accessor table for every
  // message in server/application_history_server.proto. All of these are
  // populated by the static initializer at the bottom of this class.
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ApplicationHistoryDataProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ApplicationHistoryDataProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ApplicationStartDataProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ApplicationStartDataProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ApplicationFinishDataProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ApplicationFinishDataProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ApplicationAttemptHistoryDataProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ApplicationAttemptHistoryDataProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ApplicationAttemptStartDataProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ApplicationAttemptStartDataProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ApplicationAttemptFinishDataProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ApplicationAttemptFinishDataProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ContainerHistoryDataProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ContainerHistoryDataProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ContainerStartDataProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ContainerStartDataProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ContainerFinishDataProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ContainerFinishDataProto_fieldAccessorTable;

  /** @return the file descriptor for server/application_history_server.proto. */
  public static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  // Assigned exactly once by the InternalDescriptorAssigner in the static
  // initializer below.
  private static  org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto for
    // server/application_history_server.proto, emitted by protoc as escaped
    // string chunks. This is runtime data — it must never be edited by hand.
    java.lang.String[] descriptorData = {
      "\n\'server/application_history_server.prot" +
      "o\022\013hadoop.yarn\032\021yarn_protos.proto\"\223\003\n\033Ap" +
      "plicationHistoryDataProto\0227\n\016application" +
      "_id\030\001 \001(\0132\037.hadoop.yarn.ApplicationIdPro" +
      "to\022\030\n\020application_name\030\002 \001(\t\022\030\n\020applicat" +
      "ion_type\030\003 \001(\t\022\014\n\004user\030\004 \001(\t\022\r\n\005queue\030\005 " +
      "\001(\t\022\023\n\013submit_time\030\006 \001(\003\022\022\n\nstart_time\030\007" +
      " \001(\003\022\023\n\013finish_time\030\010 \001(\003\022\030\n\020diagnostics" +
      "_info\030\t \001(\t\022J\n\030final_application_status\030" +
      "\n \001(\0162(.hadoop.yarn.FinalApplicationStat" +
      "usProto\022F\n\026yarn_application_state\030\013 \001(\0162" +
      "&.hadoop.yarn.YarnApplicationStateProto\"" +
      "\316\001\n\031ApplicationStartDataProto\0227\n\016applica" +
      "tion_id\030\001 \001(\0132\037.hadoop.yarn.ApplicationI" +
      "dProto\022\030\n\020application_name\030\002 \001(\t\022\030\n\020appl" +
      "ication_type\030\003 \001(\t\022\014\n\004user\030\004 \001(\t\022\r\n\005queu" +
      "e\030\005 \001(\t\022\023\n\013submit_time\030\006 \001(\003\022\022\n\nstart_ti" +
      "me\030\007 \001(\003\"\230\002\n\032ApplicationFinishDataProto\022" +
      "7\n\016application_id\030\001 \001(\0132\037.hadoop.yarn.Ap" +
      "plicationIdProto\022\023\n\013finish_time\030\002 \001(\003\022\030\n" +
      "\020diagnostics_info\030\003 \001(\t\022J\n\030final_applica" +
      "tion_status\030\004 \001(\0162(.hadoop.yarn.FinalApp" +
      "licationStatusProto\022F\n\026yarn_application_" +
      "state\030\005 \001(\0162&.hadoop.yarn.YarnApplicatio" +
      "nStateProto\"\233\003\n\"ApplicationAttemptHistor" +
      "yDataProto\022F\n\026application_attempt_id\030\001 \001" +
      "(\0132&.hadoop.yarn.ApplicationAttemptIdPro" +
      "to\022\014\n\004host\030\002 \001(\t\022\020\n\010rpc_port\030\003 \001(\005\022\024\n\014tr" +
      "acking_url\030\004 \001(\t\022\030\n\020diagnostics_info\030\005 \001" +
      "(\t\022J\n\030final_application_status\030\006 \001(\0162(.h" +
      "adoop.yarn.FinalApplicationStatusProto\022:" +
      "\n\023master_container_id\030\007 \001(\0132\035.hadoop.yar" +
      "n.ContainerIdProto\022U\n\036yarn_application_a" +
      "ttempt_state\030\010 \001(\0162-.hadoop.yarn.YarnApp" +
      "licationAttemptStateProto\"\306\001\n Applicatio" +
      "nAttemptStartDataProto\022F\n\026application_at" +
      "tempt_id\030\001 \001(\0132&.hadoop.yarn.Application" +
      "AttemptIdProto\022\014\n\004host\030\002 \001(\t\022\020\n\010rpc_port" +
      "\030\003 \001(\005\022:\n\023master_container_id\030\004 \001(\0132\035.ha" +
      "doop.yarn.ContainerIdProto\"\276\002\n!Applicati" +
      "onAttemptFinishDataProto\022F\n\026application_" +
      "attempt_id\030\001 \001(\0132&.hadoop.yarn.Applicati" +
      "onAttemptIdProto\022\024\n\014tracking_url\030\002 \001(\t\022\030" +
      "\n\020diagnostics_info\030\003 \001(\t\022J\n\030final_applic" +
      "ation_status\030\004 \001(\0162(.hadoop.yarn.FinalAp" +
      "plicationStatusProto\022U\n\036yarn_application" +
      "_attempt_state\030\005 \001(\0162-.hadoop.yarn.YarnA" +
      "pplicationAttemptStateProto\"\207\003\n\031Containe" +
      "rHistoryDataProto\0223\n\014container_id\030\001 \001(\0132" +
      "\035.hadoop.yarn.ContainerIdProto\0226\n\022alloca" +
      "ted_resource\030\002 \001(\0132\032.hadoop.yarn.Resourc" +
      "eProto\0222\n\020assigned_node_id\030\003 \001(\0132\030.hadoo" +
      "p.yarn.NodeIdProto\022,\n\010priority\030\004 \001(\0132\032.h" +
      "adoop.yarn.PriorityProto\022\022\n\nstart_time\030\005" +
      " \001(\003\022\023\n\013finish_time\030\006 \001(\003\022\030\n\020diagnostics" +
      "_info\030\007 \001(\t\022\035\n\025container_exit_status\030\010 \001" +
      "(\005\0229\n\017container_state\030\t \001(\0162 .hadoop.yar" +
      "n.ContainerStateProto\"\374\001\n\027ContainerStart" +
      "DataProto\0223\n\014container_id\030\001 \001(\0132\035.hadoop" +
      ".yarn.ContainerIdProto\0226\n\022allocated_reso" +
      "urce\030\002 \001(\0132\032.hadoop.yarn.ResourceProto\0222" +
      "\n\020assigned_node_id\030\003 \001(\0132\030.hadoop.yarn.N" +
      "odeIdProto\022,\n\010priority\030\004 \001(\0132\032.hadoop.ya" +
      "rn.PriorityProto\022\022\n\nstart_time\030\005 \001(\003\"\330\001\n" +
      "\030ContainerFinishDataProto\0223\n\014container_i" +
      "d\030\001 \001(\0132\035.hadoop.yarn.ContainerIdProto\022\023" +
      "\n\013finish_time\030\002 \001(\003\022\030\n\020diagnostics_info\030" +
      "\003 \001(\t\022\035\n\025container_exit_status\030\004 \001(\005\0229\n\017" +
      "container_state\030\005 \001(\0162 .hadoop.yarn.Cont" +
      "ainerStateProtoBD\n\034org.apache.hadoop.yar" +
      "n.protoB\036ApplicationHistoryServerProtos\210" +
      "\001\001\240\001\001"
    };
    // Callback that captures the built FileDescriptor into the outer
    // 'descriptor' field; returning null registers no extensions.
    org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor.    InternalDescriptorAssigner() {
          public org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry assignDescriptors(
              org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor root) {
            descriptor = root;
            return null;
          }
        };
    // Build the file descriptor, resolving the import of yarn_protos.proto.
    org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor(),
        }, assigner);
    // Wire up each message's descriptor and reflective accessor table, in the
    // order the messages are declared in the .proto file.
    internal_static_hadoop_yarn_ApplicationHistoryDataProto_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_hadoop_yarn_ApplicationHistoryDataProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ApplicationHistoryDataProto_descriptor,
        new java.lang.String[] { "ApplicationId", "ApplicationName", "ApplicationType", "User", "Queue", "SubmitTime", "StartTime", "FinishTime", "DiagnosticsInfo", "FinalApplicationStatus", "YarnApplicationState", });
    internal_static_hadoop_yarn_ApplicationStartDataProto_descriptor =
      getDescriptor().getMessageTypes().get(1);
    internal_static_hadoop_yarn_ApplicationStartDataProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ApplicationStartDataProto_descriptor,
        new java.lang.String[] { "ApplicationId", "ApplicationName", "ApplicationType", "User", "Queue", "SubmitTime", "StartTime", });
    internal_static_hadoop_yarn_ApplicationFinishDataProto_descriptor =
      getDescriptor().getMessageTypes().get(2);
    internal_static_hadoop_yarn_ApplicationFinishDataProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ApplicationFinishDataProto_descriptor,
        new java.lang.String[] { "ApplicationId", "FinishTime", "DiagnosticsInfo", "FinalApplicationStatus", "YarnApplicationState", });
    internal_static_hadoop_yarn_ApplicationAttemptHistoryDataProto_descriptor =
      getDescriptor().getMessageTypes().get(3);
    internal_static_hadoop_yarn_ApplicationAttemptHistoryDataProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ApplicationAttemptHistoryDataProto_descriptor,
        new java.lang.String[] { "ApplicationAttemptId", "Host", "RpcPort", "TrackingUrl", "DiagnosticsInfo", "FinalApplicationStatus", "MasterContainerId", "YarnApplicationAttemptState", });
    internal_static_hadoop_yarn_ApplicationAttemptStartDataProto_descriptor =
      getDescriptor().getMessageTypes().get(4);
    internal_static_hadoop_yarn_ApplicationAttemptStartDataProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ApplicationAttemptStartDataProto_descriptor,
        new java.lang.String[] { "ApplicationAttemptId", "Host", "RpcPort", "MasterContainerId", });
    internal_static_hadoop_yarn_ApplicationAttemptFinishDataProto_descriptor =
      getDescriptor().getMessageTypes().get(5);
    internal_static_hadoop_yarn_ApplicationAttemptFinishDataProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ApplicationAttemptFinishDataProto_descriptor,
        new java.lang.String[] { "ApplicationAttemptId", "TrackingUrl", "DiagnosticsInfo", "FinalApplicationStatus", "YarnApplicationAttemptState", });
    internal_static_hadoop_yarn_ContainerHistoryDataProto_descriptor =
      getDescriptor().getMessageTypes().get(6);
    internal_static_hadoop_yarn_ContainerHistoryDataProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ContainerHistoryDataProto_descriptor,
        new java.lang.String[] { "ContainerId", "AllocatedResource", "AssignedNodeId", "Priority", "StartTime", "FinishTime", "DiagnosticsInfo", "ContainerExitStatus", "ContainerState", });
    internal_static_hadoop_yarn_ContainerStartDataProto_descriptor =
      getDescriptor().getMessageTypes().get(7);
    internal_static_hadoop_yarn_ContainerStartDataProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ContainerStartDataProto_descriptor,
        new java.lang.String[] { "ContainerId", "AllocatedResource", "AssignedNodeId", "Priority", "StartTime", });
    internal_static_hadoop_yarn_ContainerFinishDataProto_descriptor =
      getDescriptor().getMessageTypes().get(8);
    internal_static_hadoop_yarn_ContainerFinishDataProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ContainerFinishDataProto_descriptor,
        new java.lang.String[] { "ContainerId", "FinishTime", "DiagnosticsInfo", "ContainerExitStatus", "ContainerState", });
    // Ensure the imported yarn_protos.proto descriptor class is initialized.
    org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor();
  }

  // @@protoc_insertion_point(outer_class_scope)
}




© 2015 - 2024 Weber Informatics LLC | Privacy Policy