org.apache.hadoop.mapreduce.v2.proto.MRProtos

// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: mr_protos.proto

package org.apache.hadoop.mapreduce.v2.proto;

public final class MRProtos {
  private MRProtos() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  /**
   * Protobuf enum {@code hadoop.mapreduce.TaskTypeProto}
   */
  public enum TaskTypeProto
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * MAP = 1;
     */
    MAP(0, 1),
    /**
     * REDUCE = 2;
     */
    REDUCE(1, 2),
    ;

    /**
     * MAP = 1;
     */
    public static final int MAP_VALUE = 1;
    /**
     * REDUCE = 2;
     */
    public static final int REDUCE_VALUE = 2;


    public final int getNumber() { return value; }

    public static TaskTypeProto valueOf(int value) {
      switch (value) {
        case 1: return MAP;
        case 2: return REDUCE;
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<TaskTypeProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static com.google.protobuf.Internal.EnumLiteMap<TaskTypeProto>
        internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<TaskTypeProto>() {
            public TaskTypeProto findValueByNumber(int number) {
              return TaskTypeProto.valueOf(number);
            }
          };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(index);
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.getDescriptor().getEnumTypes().get(0);
    }

    private static final TaskTypeProto[] VALUES = values();

    public static TaskTypeProto valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int index;
    private final int value;

    private TaskTypeProto(int index, int value) {
      this.index = index;
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.mapreduce.TaskTypeProto)
  }
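
  // Editor's usage sketch (not compiler-generated): each generated enum maps
  // between its Java constants and the proto wire numbers via getNumber() and
  // the static valueOf(int). valueOf(int) returns null for a number that is not
  // declared in mr_protos.proto, so code decoding raw values should null-check.
  private static TaskTypeProto exampleDecodeTaskType(int wireNumber) {
    TaskTypeProto decoded = TaskTypeProto.valueOf(wireNumber); // 1 -> MAP, 2 -> REDUCE
    if (decoded == null) {
      // Unknown wire number (e.g. written by a newer schema); fall back to MAP
      // purely for illustration.
      return TaskTypeProto.MAP;
    }
    return decoded;
  }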

  /**
   * Protobuf enum {@code hadoop.mapreduce.TaskStateProto}
   */
  public enum TaskStateProto
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * TS_NEW = 1;
     */
    TS_NEW(0, 1),
    /**
     * TS_SCHEDULED = 2;
     */
    TS_SCHEDULED(1, 2),
    /**
     * TS_RUNNING = 3;
     */
    TS_RUNNING(2, 3),
    /**
     * TS_SUCCEEDED = 4;
     */
    TS_SUCCEEDED(3, 4),
    /**
     * TS_FAILED = 5;
     */
    TS_FAILED(4, 5),
    /**
     * TS_KILLED = 6;
     */
    TS_KILLED(5, 6),
    ;

    /**
     * TS_NEW = 1;
     */
    public static final int TS_NEW_VALUE = 1;
    /**
     * TS_SCHEDULED = 2;
     */
    public static final int TS_SCHEDULED_VALUE = 2;
    /**
     * TS_RUNNING = 3;
     */
    public static final int TS_RUNNING_VALUE = 3;
    /**
     * TS_SUCCEEDED = 4;
     */
    public static final int TS_SUCCEEDED_VALUE = 4;
    /**
     * TS_FAILED = 5;
     */
    public static final int TS_FAILED_VALUE = 5;
    /**
     * TS_KILLED = 6;
     */
    public static final int TS_KILLED_VALUE = 6;


    public final int getNumber() { return value; }

    public static TaskStateProto valueOf(int value) {
      switch (value) {
        case 1: return TS_NEW;
        case 2: return TS_SCHEDULED;
        case 3: return TS_RUNNING;
        case 4: return TS_SUCCEEDED;
        case 5: return TS_FAILED;
        case 6: return TS_KILLED;
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<TaskStateProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static com.google.protobuf.Internal.EnumLiteMap<TaskStateProto>
        internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<TaskStateProto>() {
            public TaskStateProto findValueByNumber(int number) {
              return TaskStateProto.valueOf(number);
            }
          };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(index);
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.getDescriptor().getEnumTypes().get(1);
    }

    private static final TaskStateProto[] VALUES = values();

    public static TaskStateProto valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int index;
    private final int value;

    private TaskStateProto(int index, int value) {
      this.index = index;
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.mapreduce.TaskStateProto)
  }

  /**
   * Protobuf enum {@code hadoop.mapreduce.PhaseProto}
   */
  public enum PhaseProto
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * P_STARTING = 1;
     */
    P_STARTING(0, 1),
    /**
     * P_MAP = 2;
     */
    P_MAP(1, 2),
    /**
     * P_SHUFFLE = 3;
     */
    P_SHUFFLE(2, 3),
    /**
     * P_SORT = 4;
     */
    P_SORT(3, 4),
    /**
     * P_REDUCE = 5;
     */
    P_REDUCE(4, 5),
    /**
     * P_CLEANUP = 6;
     */
    P_CLEANUP(5, 6),
    ;

    /**
     * P_STARTING = 1;
     */
    public static final int P_STARTING_VALUE = 1;
    /**
     * P_MAP = 2;
     */
    public static final int P_MAP_VALUE = 2;
    /**
     * P_SHUFFLE = 3;
     */
    public static final int P_SHUFFLE_VALUE = 3;
    /**
     * P_SORT = 4;
     */
    public static final int P_SORT_VALUE = 4;
    /**
     * P_REDUCE = 5;
     */
    public static final int P_REDUCE_VALUE = 5;
    /**
     * P_CLEANUP = 6;
     */
    public static final int P_CLEANUP_VALUE = 6;


    public final int getNumber() { return value; }

    public static PhaseProto valueOf(int value) {
      switch (value) {
        case 1: return P_STARTING;
        case 2: return P_MAP;
        case 3: return P_SHUFFLE;
        case 4: return P_SORT;
        case 5: return P_REDUCE;
        case 6: return P_CLEANUP;
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<PhaseProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static com.google.protobuf.Internal.EnumLiteMap<PhaseProto>
        internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<PhaseProto>() {
            public PhaseProto findValueByNumber(int number) {
              return PhaseProto.valueOf(number);
            }
          };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(index);
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.getDescriptor().getEnumTypes().get(2);
    }

    private static final PhaseProto[] VALUES = values();

    public static PhaseProto valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int index;
    private final int value;

    private PhaseProto(int index, int value) {
      this.index = index;
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.mapreduce.PhaseProto)
  }

  /**
   * Protobuf enum {@code hadoop.mapreduce.TaskAttemptStateProto}
   */
  public enum TaskAttemptStateProto
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * TA_NEW = 1;
     */
    TA_NEW(0, 1),
    /**
     * TA_STARTING = 2;
     */
    TA_STARTING(1, 2),
    /**
     * TA_RUNNING = 3;
     */
    TA_RUNNING(2, 3),
    /**
     * TA_COMMIT_PENDING = 4;
     */
    TA_COMMIT_PENDING(3, 4),
    /**
     * TA_SUCCEEDED = 5;
     */
    TA_SUCCEEDED(4, 5),
    /**
     * TA_FAILED = 6;
     */
    TA_FAILED(5, 6),
    /**
     * TA_KILLED = 7;
     */
    TA_KILLED(6, 7),
    ;

    /**
     * TA_NEW = 1;
     */
    public static final int TA_NEW_VALUE = 1;
    /**
     * TA_STARTING = 2;
     */
    public static final int TA_STARTING_VALUE = 2;
    /**
     * TA_RUNNING = 3;
     */
    public static final int TA_RUNNING_VALUE = 3;
    /**
     * TA_COMMIT_PENDING = 4;
     */
    public static final int TA_COMMIT_PENDING_VALUE = 4;
    /**
     * TA_SUCCEEDED = 5;
     */
    public static final int TA_SUCCEEDED_VALUE = 5;
    /**
     * TA_FAILED = 6;
     */
    public static final int TA_FAILED_VALUE = 6;
    /**
     * TA_KILLED = 7;
     */
    public static final int TA_KILLED_VALUE = 7;


    public final int getNumber() { return value; }

    public static TaskAttemptStateProto valueOf(int value) {
      switch (value) {
        case 1: return TA_NEW;
        case 2: return TA_STARTING;
        case 3: return TA_RUNNING;
        case 4: return TA_COMMIT_PENDING;
        case 5: return TA_SUCCEEDED;
        case 6: return TA_FAILED;
        case 7: return TA_KILLED;
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<TaskAttemptStateProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static com.google.protobuf.Internal.EnumLiteMap<TaskAttemptStateProto>
        internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<TaskAttemptStateProto>() {
            public TaskAttemptStateProto findValueByNumber(int number) {
              return TaskAttemptStateProto.valueOf(number);
            }
          };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(index);
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.getDescriptor().getEnumTypes().get(3);
    }

    private static final TaskAttemptStateProto[] VALUES = values();

    public static TaskAttemptStateProto valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int index;
    private final int value;

    private TaskAttemptStateProto(int index, int value) {
      this.index = index;
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.mapreduce.TaskAttemptStateProto)
  }

  /**
   * Protobuf enum {@code hadoop.mapreduce.JobStateProto}
   */
  public enum JobStateProto
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * J_NEW = 1;
     */
    J_NEW(0, 1),
    /**
     * J_INITED = 2;
     */
    J_INITED(1, 2),
    /**
     * J_RUNNING = 3;
     */
    J_RUNNING(2, 3),
    /**
     * J_SUCCEEDED = 4;
     */
    J_SUCCEEDED(3, 4),
    /**
     * J_FAILED = 5;
     */
    J_FAILED(4, 5),
    /**
     * J_KILLED = 6;
     */
    J_KILLED(5, 6),
    /**
     * J_ERROR = 7;
     */
    J_ERROR(6, 7),
    ;

    /**
     * J_NEW = 1;
     */
    public static final int J_NEW_VALUE = 1;
    /**
     * J_INITED = 2;
     */
    public static final int J_INITED_VALUE = 2;
    /**
     * J_RUNNING = 3;
     */
    public static final int J_RUNNING_VALUE = 3;
    /**
     * J_SUCCEEDED = 4;
     */
    public static final int J_SUCCEEDED_VALUE = 4;
    /**
     * J_FAILED = 5;
     */
    public static final int J_FAILED_VALUE = 5;
    /**
     * J_KILLED = 6;
     */
    public static final int J_KILLED_VALUE = 6;
    /**
     * J_ERROR = 7;
     */
    public static final int J_ERROR_VALUE = 7;


    public final int getNumber() { return value; }

    public static JobStateProto valueOf(int value) {
      switch (value) {
        case 1: return J_NEW;
        case 2: return J_INITED;
        case 3: return J_RUNNING;
        case 4: return J_SUCCEEDED;
        case 5: return J_FAILED;
        case 6: return J_KILLED;
        case 7: return J_ERROR;
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<JobStateProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static com.google.protobuf.Internal.EnumLiteMap<JobStateProto>
        internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<JobStateProto>() {
            public JobStateProto findValueByNumber(int number) {
              return JobStateProto.valueOf(number);
            }
          };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(index);
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.getDescriptor().getEnumTypes().get(4);
    }

    private static final JobStateProto[] VALUES = values();

    public static JobStateProto valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int index;
    private final int value;

    private JobStateProto(int index, int value) {
      this.index = index;
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.mapreduce.JobStateProto)
  }

  /**
   * Protobuf enum {@code hadoop.mapreduce.TaskAttemptCompletionEventStatusProto}
   */
  public enum TaskAttemptCompletionEventStatusProto
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * TACE_FAILED = 1;
     */
    TACE_FAILED(0, 1),
    /**
     * TACE_KILLED = 2;
     */
    TACE_KILLED(1, 2),
    /**
     * TACE_SUCCEEDED = 3;
     */
    TACE_SUCCEEDED(2, 3),
    /**
     * TACE_OBSOLETE = 4;
     */
    TACE_OBSOLETE(3, 4),
    /**
     * TACE_TIPFAILED = 5;
     */
    TACE_TIPFAILED(4, 5),
    ;

    /**
     * TACE_FAILED = 1;
     */
    public static final int TACE_FAILED_VALUE = 1;
    /**
     * TACE_KILLED = 2;
     */
    public static final int TACE_KILLED_VALUE = 2;
    /**
     * TACE_SUCCEEDED = 3;
     */
    public static final int TACE_SUCCEEDED_VALUE = 3;
    /**
     * TACE_OBSOLETE = 4;
     */
    public static final int TACE_OBSOLETE_VALUE = 4;
    /**
     * TACE_TIPFAILED = 5;
     */
    public static final int TACE_TIPFAILED_VALUE = 5;


    public final int getNumber() { return value; }

    public static TaskAttemptCompletionEventStatusProto valueOf(int value) {
      switch (value) {
        case 1: return TACE_FAILED;
        case 2: return TACE_KILLED;
        case 3: return TACE_SUCCEEDED;
        case 4: return TACE_OBSOLETE;
        case 5: return TACE_TIPFAILED;
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<TaskAttemptCompletionEventStatusProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static com.google.protobuf.Internal.EnumLiteMap<TaskAttemptCompletionEventStatusProto>
        internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<TaskAttemptCompletionEventStatusProto>() {
            public TaskAttemptCompletionEventStatusProto findValueByNumber(int number) {
              return TaskAttemptCompletionEventStatusProto.valueOf(number);
            }
          };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(index);
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.getDescriptor().getEnumTypes().get(5);
    }

    private static final TaskAttemptCompletionEventStatusProto[] VALUES = values();

    public static TaskAttemptCompletionEventStatusProto valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int index;
    private final int value;

    private TaskAttemptCompletionEventStatusProto(int index, int value) {
      this.index = index;
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.mapreduce.TaskAttemptCompletionEventStatusProto)
  }

  public interface JobIdProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.yarn.ApplicationIdProto app_id = 1;
    /**
     * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
     */
    boolean hasAppId();
    /**
     * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getAppId();
    /**
     * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getAppIdOrBuilder();

    // optional int32 id = 2;
    /**
     * optional int32 id = 2;
     */
    boolean hasId();
    /**
     * optional int32 id = 2;
     */
    int getId();
  }
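
  // Editor's usage sketch (not compiler-generated): JobIdProtoOrBuilder is
  // implemented by both JobIdProto and JobIdProto.Builder, so read-only code can
  // accept either without forcing a build(). The has*() accessors report whether
  // each optional field was explicitly set. getClusterTimestamp() is the
  // YarnProtos accessor for ApplicationIdProto assumed here.
  private static String exampleDescribeJobId(JobIdProtoOrBuilder job) {
    StringBuilder sb = new StringBuilder("job");
    if (job.hasAppId()) {
      sb.append('_').append(job.getAppId().getClusterTimestamp());
    }
    if (job.hasId()) {
      sb.append('_').append(job.getId());
    }
    return sb.toString();
  }
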
  /**
   * Protobuf type {@code hadoop.mapreduce.JobIdProto}
   */
  public static final class JobIdProto extends
      com.google.protobuf.GeneratedMessage
      implements JobIdProtoOrBuilder {
    // Use JobIdProto.newBuilder() to construct.
    private JobIdProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private JobIdProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final JobIdProto defaultInstance;
    public static JobIdProto getDefaultInstance() {
      return defaultInstance;
    }

    public JobIdProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private JobIdProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = appId_.toBuilder();
              }
              appId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(appId_);
                appId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              id_ = input.readInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_JobIdProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_JobIdProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder.class);
    }

    public static com.google.protobuf.Parser<JobIdProto> PARSER =
        new com.google.protobuf.AbstractParser<JobIdProto>() {
      public JobIdProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new JobIdProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<JobIdProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.yarn.ApplicationIdProto app_id = 1;
    public static final int APP_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto appId_;
    /**
     * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
     */
    public boolean hasAppId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getAppId() {
      return appId_;
    }
    /**
     * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getAppIdOrBuilder() {
      return appId_;
    }

    // optional int32 id = 2;
    public static final int ID_FIELD_NUMBER = 2;
    private int id_;
    /**
     * optional int32 id = 2;
     */
    public boolean hasId() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * optional int32 id = 2;
     */
    public int getId() {
      return id_;
    }

    private void initFields() {
      appId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance();
      id_ = 0;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, appId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeInt32(2, id_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, appId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(2, id_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto other = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto) obj;

      boolean result = true;
      result = result && (hasAppId() == other.hasAppId());
      if (hasAppId()) {
        result = result && getAppId()
            .equals(other.getAppId());
      }
      result = result && (hasId() == other.hasId());
      if (hasId()) {
        result = result && (getId()
            == other.getId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasAppId()) {
        hash = (37 * hash) + APP_ID_FIELD_NUMBER;
        hash = (53 * hash) + getAppId().hashCode();
      }
      if (hasId()) {
        hash = (37 * hash) + ID_FIELD_NUMBER;
        hash = (53 * hash) + getId();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.JobIdProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_JobIdProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_JobIdProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getAppIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (appIdBuilder_ == null) {
          appId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance();
        } else {
          appIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        id_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_JobIdProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (appIdBuilder_ == null) {
          result.appId_ = appId_;
        } else {
          result.appId_ = appIdBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.id_ = id_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance()) return this;
        if (other.hasAppId()) {
          mergeAppId(other.getAppId());
        }
        if (other.hasId()) {
          setId(other.getId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.yarn.ApplicationIdProto app_id = 1;
      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto appId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> appIdBuilder_;
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       */
      public boolean hasAppId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getAppId() {
        if (appIdBuilder_ == null) {
          return appId_;
        } else {
          return appIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       */
      public Builder setAppId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (appIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          appId_ = value;
          onChanged();
        } else {
          appIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       */
      public Builder setAppId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (appIdBuilder_ == null) {
          appId_ = builderForValue.build();
          onChanged();
        } else {
          appIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       */
      public Builder mergeAppId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (appIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              appId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            appId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.newBuilder(appId_).mergeFrom(value).buildPartial();
          } else {
            appId_ = value;
          }
          onChanged();
        } else {
          appIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       */
      public Builder clearAppId() {
        if (appIdBuilder_ == null) {
          appId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance();
          onChanged();
        } else {
          appIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getAppIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getAppIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getAppIdOrBuilder() {
        if (appIdBuilder_ != null) {
          return appIdBuilder_.getMessageOrBuilder();
        } else {
          return appId_;
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getAppIdFieldBuilder() {
        if (appIdBuilder_ == null) {
          appIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  appId_,
                  getParentForChildren(),
                  isClean());
          appId_ = null;
        }
        return appIdBuilder_;
      }

      // optional int32 id = 2;
      private int id_ ;
      /**
       * optional int32 id = 2;
       */
      public boolean hasId() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * optional int32 id = 2;
       */
      public int getId() {
        return id_;
      }
      /**
       * optional int32 id = 2;
       */
      public Builder setId(int value) {
        bitField0_ |= 0x00000002;
        id_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 id = 2;
       */
      public Builder clearId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        id_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.JobIdProto)
    }

    static {
      defaultInstance = new JobIdProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.JobIdProto)
  }
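
  // Editor's usage sketch (not compiler-generated): building a JobIdProto with
  // the generated Builder, serializing it, and parsing it back. The
  // ApplicationIdProto builder calls setClusterTimestamp(long) and setId(int)
  // are the YarnProtos setters assumed here.
  private static JobIdProto exampleRoundTripJobId()
      throws com.google.protobuf.InvalidProtocolBufferException {
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto appId =
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.newBuilder()
            .setClusterTimestamp(1234567890000L)
            .setId(7)
            .build();
    JobIdProto jobId = JobIdProto.newBuilder()
        .setAppId(appId)   // optional message field, tag 1
        .setId(42)         // optional int32 field, tag 2
        .build();
    byte[] bytes = jobId.toByteArray();  // wire encoding
    return JobIdProto.parseFrom(bytes);  // round-trips to an equal message
  }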

  public interface TaskIdProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.JobIdProto job_id = 1;
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    boolean hasJobId();
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId();
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder();

    // optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
    /**
     * optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
     */
    boolean hasTaskType();
    /**
     * optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto getTaskType();

    // optional int32 id = 3;
    /**
     * optional int32 id = 3;
     */
    boolean hasId();
    /**
     * optional int32 id = 3;
     */
    int getId();
  }
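
  // Editor's usage sketch (not compiler-generated): a TaskIdProto nests the
  // JobIdProto above and tags the task with a TaskTypeProto. setTaskType and
  // setId are invoked by the Builder's mergeFrom below; setJobId is the standard
  // generated setter for the job_id message field, assumed here.
  private static TaskIdProto exampleReduceTask(JobIdProto jobId, int taskIndex) {
    return TaskIdProto.newBuilder()
        .setJobId(jobId)                    // optional message field, tag 1
        .setTaskType(TaskTypeProto.REDUCE)  // optional enum field, tag 2
        .setId(taskIndex)                   // optional int32 field, tag 3
        .build();
  }
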
  /**
   * Protobuf type {@code hadoop.mapreduce.TaskIdProto}
   */
  public static final class TaskIdProto extends
      com.google.protobuf.GeneratedMessage
      implements TaskIdProtoOrBuilder {
    // Use TaskIdProto.newBuilder() to construct.
    private TaskIdProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private TaskIdProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final TaskIdProto defaultInstance;
    public static TaskIdProto getDefaultInstance() {
      return defaultInstance;
    }

    public TaskIdProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private TaskIdProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = jobId_.toBuilder();
              }
              jobId_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(jobId_);
                jobId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              int rawValue = input.readEnum();
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto value = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(2, rawValue);
              } else {
                bitField0_ |= 0x00000002;
                taskType_ = value;
              }
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              id_ = input.readInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskIdProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskIdProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder.class);
    }

    public static com.google.protobuf.Parser<TaskIdProto> PARSER =
        new com.google.protobuf.AbstractParser<TaskIdProto>() {
      public TaskIdProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new TaskIdProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<TaskIdProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.JobIdProto job_id = 1;
    public static final int JOB_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_;
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public boolean hasJobId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
      return jobId_;
    }
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
      return jobId_;
    }

    // optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
    public static final int TASK_TYPE_FIELD_NUMBER = 2;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto taskType_;
    /**
     * optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
     */
    public boolean hasTaskType() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto getTaskType() {
      return taskType_;
    }

    // optional int32 id = 3;
    public static final int ID_FIELD_NUMBER = 3;
    private int id_;
    /**
     * optional int32 id = 3;
     */
    public boolean hasId() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * optional int32 id = 3;
     */
    public int getId() {
      return id_;
    }

    private void initFields() {
      jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
      taskType_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto.MAP;
      id_ = 0;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, jobId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeEnum(2, taskType_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeInt32(3, id_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, jobId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(2, taskType_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(3, id_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto other = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto) obj;

      boolean result = true;
      result = result && (hasJobId() == other.hasJobId());
      if (hasJobId()) {
        result = result && getJobId()
            .equals(other.getJobId());
      }
      result = result && (hasTaskType() == other.hasTaskType());
      if (hasTaskType()) {
        result = result &&
            (getTaskType() == other.getTaskType());
      }
      result = result && (hasId() == other.hasId());
      if (hasId()) {
        result = result && (getId()
            == other.getId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasJobId()) {
        hash = (37 * hash) + JOB_ID_FIELD_NUMBER;
        hash = (53 * hash) + getJobId().hashCode();
      }
      if (hasTaskType()) {
        hash = (37 * hash) + TASK_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getTaskType());
      }
      if (hasId()) {
        hash = (37 * hash) + ID_FIELD_NUMBER;
        hash = (53 * hash) + getId();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.TaskIdProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskIdProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskIdProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getJobIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (jobIdBuilder_ == null) {
          jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
        } else {
          jobIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        taskType_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto.MAP;
        bitField0_ = (bitField0_ & ~0x00000002);
        id_ = 0;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskIdProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (jobIdBuilder_ == null) {
          result.jobId_ = jobId_;
        } else {
          result.jobId_ = jobIdBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.taskType_ = taskType_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.id_ = id_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance()) return this;
        if (other.hasJobId()) {
          mergeJobId(other.getJobId());
        }
        if (other.hasTaskType()) {
          setTaskType(other.getTaskType());
        }
        if (other.hasId()) {
          setId(other.getId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.JobIdProto job_id = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> jobIdBuilder_;
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public boolean hasJobId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
        if (jobIdBuilder_ == null) {
          return jobId_;
        } else {
          return jobIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder setJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
        if (jobIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          jobId_ = value;
          onChanged();
        } else {
          jobIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder setJobId(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder builderForValue) {
        if (jobIdBuilder_ == null) {
          jobId_ = builderForValue.build();
          onChanged();
        } else {
          jobIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder mergeJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
        if (jobIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              jobId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance()) {
            jobId_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.newBuilder(jobId_).mergeFrom(value).buildPartial();
          } else {
            jobId_ = value;
          }
          onChanged();
        } else {
          jobIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder clearJobId() {
        if (jobIdBuilder_ == null) {
          jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
          onChanged();
        } else {
          jobIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder getJobIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getJobIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
        if (jobIdBuilder_ != null) {
          return jobIdBuilder_.getMessageOrBuilder();
        } else {
          return jobId_;
        }
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> 
          getJobIdFieldBuilder() {
        if (jobIdBuilder_ == null) {
          jobIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>(
                  jobId_,
                  getParentForChildren(),
                  isClean());
          jobId_ = null;
        }
        return jobIdBuilder_;
      }
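
      // Usage sketch (illustrative, not part of the generated code): setJobId(...) replaces
      // the whole job_id sub-message, while mergeJobId(...) overlays the incoming value onto
      // whatever is already set (the newBuilder(jobId_).mergeFrom(value).buildPartial() path
      // above). With hypothetical JobIdProto values "first" and "second":
      //
      //   TaskIdProto.Builder b = TaskIdProto.newBuilder();
      //   b.mergeJobId(first);    // nothing set yet, so this behaves like setJobId(first)
      //   b.mergeJobId(second);   // fields set in "second" overwrite those from "first";
      //                           // fields left unset in "second" keep their "first" values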

      // optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto taskType_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto.MAP;
      /**
       * optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
       */
      public boolean hasTaskType() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto getTaskType() {
        return taskType_;
      }
      /**
       * optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
       */
      public Builder setTaskType(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        taskType_ = value;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
       */
      public Builder clearTaskType() {
        bitField0_ = (bitField0_ & ~0x00000002);
        taskType_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto.MAP;
        onChanged();
        return this;
      }

      // optional int32 id = 3;
      private int id_ ;
      /**
       * optional int32 id = 3;
       */
      public boolean hasId() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * optional int32 id = 3;
       */
      public int getId() {
        return id_;
      }
      /**
       * optional int32 id = 3;
       */
      public Builder setId(int value) {
        bitField0_ |= 0x00000004;
        id_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 id = 3;
       */
      public Builder clearId() {
        bitField0_ = (bitField0_ & ~0x00000004);
        id_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.TaskIdProto)
    }
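
    // Usage sketch (illustrative, not part of the generated code): a TaskIdProto is normally
    // assembled through the Builder above. The JobIdProto below is only a default-instance
    // placeholder; real callers supply a populated job id.
    //
    //   TaskIdProto taskId = TaskIdProto.newBuilder()
    //       .setJobId(JobIdProto.getDefaultInstance())
    //       .setTaskType(TaskTypeProto.MAP)
    //       .setId(3)
    //       .build();
    //
    // All three fields are optional, so hasJobId()/hasTaskType()/hasId() report whether a
    // field was explicitly set rather than merely carrying its default value.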

    static {
      defaultInstance = new TaskIdProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.TaskIdProto)
  }

  public interface TaskAttemptIdProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.TaskIdProto task_id = 1;
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    boolean hasTaskId();
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId();
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder();

    // optional int32 id = 2;
    /**
     * optional int32 id = 2;
     */
    boolean hasId();
    /**
     * optional int32 id = 2;
     */
    int getId();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.TaskAttemptIdProto}
   */
  public static final class TaskAttemptIdProto extends
      com.google.protobuf.GeneratedMessage
      implements TaskAttemptIdProtoOrBuilder {
    // Use TaskAttemptIdProto.newBuilder() to construct.
    private TaskAttemptIdProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private TaskAttemptIdProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final TaskAttemptIdProto defaultInstance;
    public static TaskAttemptIdProto getDefaultInstance() {
      return defaultInstance;
    }

    public TaskAttemptIdProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private TaskAttemptIdProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = taskId_.toBuilder();
              }
              taskId_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(taskId_);
                taskId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              id_ = input.readInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskAttemptIdProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskAttemptIdProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder.class);
    }

    public static com.google.protobuf.Parser<TaskAttemptIdProto> PARSER =
        new com.google.protobuf.AbstractParser<TaskAttemptIdProto>() {
      public TaskAttemptIdProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new TaskAttemptIdProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<TaskAttemptIdProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.TaskIdProto task_id = 1;
    public static final int TASK_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto taskId_;
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    public boolean hasTaskId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId() {
      return taskId_;
    }
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder() {
      return taskId_;
    }

    // optional int32 id = 2;
    public static final int ID_FIELD_NUMBER = 2;
    private int id_;
    /**
     * optional int32 id = 2;
     */
    public boolean hasId() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * optional int32 id = 2;
     */
    public int getId() {
      return id_;
    }

    private void initFields() {
      taskId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance();
      id_ = 0;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, taskId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeInt32(2, id_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, taskId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(2, id_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto other = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto) obj;

      boolean result = true;
      result = result && (hasTaskId() == other.hasTaskId());
      if (hasTaskId()) {
        result = result && getTaskId()
            .equals(other.getTaskId());
      }
      result = result && (hasId() == other.hasId());
      if (hasId()) {
        result = result && (getId()
            == other.getId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTaskId()) {
        hash = (37 * hash) + TASK_ID_FIELD_NUMBER;
        hash = (53 * hash) + getTaskId().hashCode();
      }
      if (hasId()) {
        hash = (37 * hash) + ID_FIELD_NUMBER;
        hash = (53 * hash) + getId();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
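
    // Usage sketch (illustrative, not part of the generated code): equals() and hashCode()
    // above give TaskAttemptIdProto value semantics over its set fields, so a parsed copy
    // compares equal to the instance it was serialized from. Assuming "attempt" is an
    // existing TaskAttemptIdProto:
    //
    //   TaskAttemptIdProto copy =
    //       TaskAttemptIdProto.parseFrom(attempt.toByteArray());
    //   boolean sameValue = attempt.equals(copy);                   // expected: true
    //   boolean sameHash  = attempt.hashCode() == copy.hashCode();  // expected: true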

    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
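
    // Usage sketch (illustrative, not part of the generated code): messages are immutable,
    // so an "edit" goes through toBuilder(), which seeds a new Builder from the current
    // field values. Assuming "attempt" is an existing TaskAttemptIdProto:
    //
    //   TaskAttemptIdProto retried = attempt.toBuilder()
    //       .setId(attempt.getId() + 1)
    //       .build();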

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.TaskAttemptIdProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskAttemptIdProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskAttemptIdProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTaskIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (taskIdBuilder_ == null) {
          taskId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance();
        } else {
          taskIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        id_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskAttemptIdProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (taskIdBuilder_ == null) {
          result.taskId_ = taskId_;
        } else {
          result.taskId_ = taskIdBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.id_ = id_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance()) return this;
        if (other.hasTaskId()) {
          mergeTaskId(other.getTaskId());
        }
        if (other.hasId()) {
          setId(other.getId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.TaskIdProto task_id = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto taskId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder> taskIdBuilder_;
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public boolean hasTaskId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId() {
        if (taskIdBuilder_ == null) {
          return taskId_;
        } else {
          return taskIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public Builder setTaskId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto value) {
        if (taskIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          taskId_ = value;
          onChanged();
        } else {
          taskIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public Builder setTaskId(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder builderForValue) {
        if (taskIdBuilder_ == null) {
          taskId_ = builderForValue.build();
          onChanged();
        } else {
          taskIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public Builder mergeTaskId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto value) {
        if (taskIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              taskId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance()) {
            taskId_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.newBuilder(taskId_).mergeFrom(value).buildPartial();
          } else {
            taskId_ = value;
          }
          onChanged();
        } else {
          taskIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public Builder clearTaskId() {
        if (taskIdBuilder_ == null) {
          taskId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance();
          onChanged();
        } else {
          taskIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder getTaskIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getTaskIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder() {
        if (taskIdBuilder_ != null) {
          return taskIdBuilder_.getMessageOrBuilder();
        } else {
          return taskId_;
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder> 
          getTaskIdFieldBuilder() {
        if (taskIdBuilder_ == null) {
          taskIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder>(
                  taskId_,
                  getParentForChildren(),
                  isClean());
          taskId_ = null;
        }
        return taskIdBuilder_;
      }

      // optional int32 id = 2;
      private int id_ ;
      /**
       * optional int32 id = 2;
       */
      public boolean hasId() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * optional int32 id = 2;
       */
      public int getId() {
        return id_;
      }
      /**
       * optional int32 id = 2;
       */
      public Builder setId(int value) {
        bitField0_ |= 0x00000002;
        id_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 id = 2;
       */
      public Builder clearId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        id_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.TaskAttemptIdProto)
    }
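
    // Usage sketch (illustrative, not part of the generated code): a task attempt id nests
    // the task id it belongs to plus an attempt number. Assuming "taskId" is a previously
    // built TaskIdProto:
    //
    //   TaskAttemptIdProto attempt = TaskAttemptIdProto.newBuilder()
    //       .setTaskId(taskId)
    //       .setId(0)
    //       .build();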

    static {
      defaultInstance = new TaskAttemptIdProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.TaskAttemptIdProto)
  }

  public interface CounterProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional string name = 1;
    /**
     * optional string name = 1;
     */
    boolean hasName();
    /**
     * optional string name = 1;
     */
    java.lang.String getName();
    /**
     * optional string name = 1;
     */
    com.google.protobuf.ByteString
        getNameBytes();

    // optional string display_name = 2;
    /**
     * optional string display_name = 2;
     */
    boolean hasDisplayName();
    /**
     * optional string display_name = 2;
     */
    java.lang.String getDisplayName();
    /**
     * optional string display_name = 2;
     */
    com.google.protobuf.ByteString
        getDisplayNameBytes();

    // optional int64 value = 3;
    /**
     * optional int64 value = 3;
     */
    boolean hasValue();
    /**
     * optional int64 value = 3;
     */
    long getValue();
  }
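
  // Usage sketch (illustrative, not part of the generated code): CounterProtoOrBuilder is
  // implemented both by CounterProto and by CounterProto.Builder, so read-only code can
  // accept either without forcing an intermediate build(). "describe" is a hypothetical
  // helper, not part of this file:
  //
  //   static String describe(CounterProtoOrBuilder c) {
  //     return c.getName() + "=" + c.getValue();
  //   }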
  /**
   * Protobuf type {@code hadoop.mapreduce.CounterProto}
   */
  public static final class CounterProto extends
      com.google.protobuf.GeneratedMessage
      implements CounterProtoOrBuilder {
    // Use CounterProto.newBuilder() to construct.
    private CounterProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private CounterProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final CounterProto defaultInstance;
    public static CounterProto getDefaultInstance() {
      return defaultInstance;
    }

    public CounterProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private CounterProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              name_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              displayName_ = input.readBytes();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              value_ = input.readInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_CounterProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_CounterProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.Builder.class);
    }

    public static com.google.protobuf.Parser<CounterProto> PARSER =
        new com.google.protobuf.AbstractParser<CounterProto>() {
      public CounterProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CounterProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<CounterProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional string name = 1;
    public static final int NAME_FIELD_NUMBER = 1;
    private java.lang.Object name_;
    /**
     * optional string name = 1;
     */
    public boolean hasName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional string name = 1;
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          name_ = s;
        }
        return s;
      }
    }
    /**
     * optional string name = 1;
     */
    public com.google.protobuf.ByteString
        getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
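
    // Note with sketch (illustrative, not part of the generated code): name_ is stored as
    // either a java.lang.String or a ByteString. getName() lazily decodes the UTF-8 bytes,
    // caching the String form only when the bytes are valid UTF-8, while getNameBytes()
    // lazily encodes and caches the ByteString form, so repeated calls avoid re-encoding.
    // Assuming "counter" is an existing CounterProto:
    //
    //   String name = counter.getName();          // may decode from bytes on first call
    //   com.google.protobuf.ByteString raw = counter.getNameBytes();  // may encode once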

    // optional string display_name = 2;
    public static final int DISPLAY_NAME_FIELD_NUMBER = 2;
    private java.lang.Object displayName_;
    /**
     * optional string display_name = 2;
     */
    public boolean hasDisplayName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * optional string display_name = 2;
     */
    public java.lang.String getDisplayName() {
      java.lang.Object ref = displayName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          displayName_ = s;
        }
        return s;
      }
    }
    /**
     * optional string display_name = 2;
     */
    public com.google.protobuf.ByteString
        getDisplayNameBytes() {
      java.lang.Object ref = displayName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        displayName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional int64 value = 3;
    public static final int VALUE_FIELD_NUMBER = 3;
    private long value_;
    /**
     * optional int64 value = 3;
     */
    public boolean hasValue() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * optional int64 value = 3;
     */
    public long getValue() {
      return value_;
    }

    private void initFields() {
      name_ = "";
      displayName_ = "";
      value_ = 0L;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getNameBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, getDisplayNameBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeInt64(3, value_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getNameBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, getDisplayNameBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(3, value_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto other = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto) obj;

      boolean result = true;
      result = result && (hasName() == other.hasName());
      if (hasName()) {
        result = result && getName()
            .equals(other.getName());
      }
      result = result && (hasDisplayName() == other.hasDisplayName());
      if (hasDisplayName()) {
        result = result && getDisplayName()
            .equals(other.getDisplayName());
      }
      result = result && (hasValue() == other.hasValue());
      if (hasValue()) {
        result = result && (getValue()
            == other.getValue());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasName()) {
        hash = (37 * hash) + NAME_FIELD_NUMBER;
        hash = (53 * hash) + getName().hashCode();
      }
      if (hasDisplayName()) {
        hash = (37 * hash) + DISPLAY_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getDisplayName().hashCode();
      }
      if (hasValue()) {
        hash = (37 * hash) + VALUE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getValue());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
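
    // Usage sketch (illustrative, not part of the generated code): when several CounterProto
    // records share one stream, the delimited variants above preserve message boundaries.
    // Writing with writeDelimitedTo(out) length-prefixes each record, and reading back is
    // expected to yield null at a clean end of stream:
    //
    //   CounterProto next;
    //   while ((next = CounterProto.parseDelimitedFrom(in)) != null) {
    //     // process "next"
    //   }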

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.CounterProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_CounterProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_CounterProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        name_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        displayName_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        value_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_CounterProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.name_ = name_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.displayName_ = displayName_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.value_ = value_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.getDefaultInstance()) return this;
        if (other.hasName()) {
          bitField0_ |= 0x00000001;
          name_ = other.name_;
          onChanged();
        }
        if (other.hasDisplayName()) {
          bitField0_ |= 0x00000002;
          displayName_ = other.displayName_;
          onChanged();
        }
        if (other.hasValue()) {
          setValue(other.getValue());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional string name = 1;
      private java.lang.Object name_ = "";
      /**
       * optional string name = 1;
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional string name = 1;
       */
      public java.lang.String getName() {
        java.lang.Object ref = name_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          name_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string name = 1;
       */
      public com.google.protobuf.ByteString
          getNameBytes() {
        java.lang.Object ref = name_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          name_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string name = 1;
       */
      public Builder setName(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        name_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string name = 1;
       */
      public Builder clearName() {
        bitField0_ = (bitField0_ & ~0x00000001);
        name_ = getDefaultInstance().getName();
        onChanged();
        return this;
      }
      /**
       * optional string name = 1;
       */
      public Builder setNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        name_ = value;
        onChanged();
        return this;
      }

      // optional string display_name = 2;
      private java.lang.Object displayName_ = "";
      /**
       * optional string display_name = 2;
       */
      public boolean hasDisplayName() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * optional string display_name = 2;
       */
      public java.lang.String getDisplayName() {
        java.lang.Object ref = displayName_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          displayName_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string display_name = 2;
       */
      public com.google.protobuf.ByteString
          getDisplayNameBytes() {
        java.lang.Object ref = displayName_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          displayName_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string display_name = 2;
       */
      public Builder setDisplayName(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        displayName_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string display_name = 2;
       */
      public Builder clearDisplayName() {
        bitField0_ = (bitField0_ & ~0x00000002);
        displayName_ = getDefaultInstance().getDisplayName();
        onChanged();
        return this;
      }
      /**
       * optional string display_name = 2;
       */
      public Builder setDisplayNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        displayName_ = value;
        onChanged();
        return this;
      }

      // optional int64 value = 3;
      private long value_ ;
      /**
       * optional int64 value = 3;
       */
      public boolean hasValue() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * optional int64 value = 3;
       */
      public long getValue() {
        return value_;
      }
      /**
       * optional int64 value = 3;
       */
      public Builder setValue(long value) {
        bitField0_ |= 0x00000004;
        value_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 value = 3;
       */
      public Builder clearValue() {
        bitField0_ = (bitField0_ & ~0x00000004);
        value_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.CounterProto)
    }

    static {
      defaultInstance = new CounterProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.CounterProto)
  }
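
  // Usage sketch (not part of the generated sources): constructing a CounterProto with the
  // generated Builder and round-tripping it through the wire format. Field names follow the
  // mr_protos.proto definitions above; the literal values are illustrative only, and
  // parseFrom declares InvalidProtocolBufferException.
  //
  //   MRProtos.CounterProto counter = MRProtos.CounterProto.newBuilder()
  //       .setName("MAP_INPUT_RECORDS")
  //       .setDisplayName("Map input records")
  //       .setValue(42L)
  //       .build();
  //   byte[] wire = counter.toByteArray();
  //   MRProtos.CounterProto parsed = MRProtos.CounterProto.parseFrom(wire);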

  public interface CounterGroupProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional string name = 1;
    /**
     * optional string name = 1;
     */
    boolean hasName();
    /**
     * optional string name = 1;
     */
    java.lang.String getName();
    /**
     * optional string name = 1;
     */
    com.google.protobuf.ByteString
        getNameBytes();

    // optional string display_name = 2;
    /**
     * optional string display_name = 2;
     */
    boolean hasDisplayName();
    /**
     * optional string display_name = 2;
     */
    java.lang.String getDisplayName();
    /**
     * optional string display_name = 2;
     */
    com.google.protobuf.ByteString
        getDisplayNameBytes();

    // repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
    /**
     * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
     */
    java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto> 
        getCountersList();
    /**
     * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto getCounters(int index);
    /**
     * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
     */
    int getCountersCount();
    /**
     * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
     */
    java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProtoOrBuilder> 
        getCountersOrBuilderList();
    /**
     * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProtoOrBuilder getCountersOrBuilder(
        int index);
  }
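
  // Reading sketch (not part of the generated sources): CounterGroupProtoOrBuilder is the
  // read-only view implemented by both CounterGroupProto and its Builder, so the same
  // accessors work against either one. Assumes "group" is any CounterGroupProtoOrBuilder.
  //
  //   for (int i = 0; i < group.getCountersCount(); i++) {
  //     MRProtos.StringCounterMapProto entry = group.getCounters(i);
  //     // inspect entry ...
  //   }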
  /**
   * Protobuf type {@code hadoop.mapreduce.CounterGroupProto}
   */
  public static final class CounterGroupProto extends
      com.google.protobuf.GeneratedMessage
      implements CounterGroupProtoOrBuilder {
    // Use CounterGroupProto.newBuilder() to construct.
    private CounterGroupProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private CounterGroupProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final CounterGroupProto defaultInstance;
    public static CounterGroupProto getDefaultInstance() {
      return defaultInstance;
    }

    public CounterGroupProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private CounterGroupProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              name_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              displayName_ = input.readBytes();
              break;
            }
            case 26: {
              if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
                counters_ = new java.util.ArrayList<org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto>();
                mutable_bitField0_ |= 0x00000004;
              }
              counters_.add(input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
          counters_ = java.util.Collections.unmodifiableList(counters_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_CounterGroupProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_CounterGroupProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.Builder.class);
    }

    public static com.google.protobuf.Parser<CounterGroupProto> PARSER =
        new com.google.protobuf.AbstractParser<CounterGroupProto>() {
      public CounterGroupProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CounterGroupProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<CounterGroupProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional string name = 1;
    public static final int NAME_FIELD_NUMBER = 1;
    private java.lang.Object name_;
    /**
     * optional string name = 1;
     */
    public boolean hasName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional string name = 1;
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          name_ = s;
        }
        return s;
      }
    }
    /**
     * optional string name = 1;
     */
    public com.google.protobuf.ByteString
        getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional string display_name = 2;
    public static final int DISPLAY_NAME_FIELD_NUMBER = 2;
    private java.lang.Object displayName_;
    /**
     * optional string display_name = 2;
     */
    public boolean hasDisplayName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * optional string display_name = 2;
     */
    public java.lang.String getDisplayName() {
      java.lang.Object ref = displayName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          displayName_ = s;
        }
        return s;
      }
    }
    /**
     * optional string display_name = 2;
     */
    public com.google.protobuf.ByteString
        getDisplayNameBytes() {
      java.lang.Object ref = displayName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        displayName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
    public static final int COUNTERS_FIELD_NUMBER = 3;
    private java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto> counters_;
    /**
     * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
     */
    public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto> getCountersList() {
      return counters_;
    }
    /**
     * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
     */
    public java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProtoOrBuilder> 
        getCountersOrBuilderList() {
      return counters_;
    }
    /**
     * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
     */
    public int getCountersCount() {
      return counters_.size();
    }
    /**
     * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto getCounters(int index) {
      return counters_.get(index);
    }
    /**
     * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProtoOrBuilder getCountersOrBuilder(
        int index) {
      return counters_.get(index);
    }

    private void initFields() {
      name_ = "";
      displayName_ = "";
      counters_ = java.util.Collections.emptyList();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getNameBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, getDisplayNameBytes());
      }
      for (int i = 0; i < counters_.size(); i++) {
        output.writeMessage(3, counters_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getNameBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, getDisplayNameBytes());
      }
      for (int i = 0; i < counters_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, counters_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto other = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto) obj;

      boolean result = true;
      result = result && (hasName() == other.hasName());
      if (hasName()) {
        result = result && getName()
            .equals(other.getName());
      }
      result = result && (hasDisplayName() == other.hasDisplayName());
      if (hasDisplayName()) {
        result = result && getDisplayName()
            .equals(other.getDisplayName());
      }
      result = result && getCountersList()
          .equals(other.getCountersList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasName()) {
        hash = (37 * hash) + NAME_FIELD_NUMBER;
        hash = (53 * hash) + getName().hashCode();
      }
      if (hasDisplayName()) {
        hash = (37 * hash) + DISPLAY_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getDisplayName().hashCode();
      }
      if (getCountersCount() > 0) {
        hash = (37 * hash) + COUNTERS_FIELD_NUMBER;
        hash = (53 * hash) + getCountersList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
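
    // Parsing sketch (not part of the generated sources): the parseFrom overloads above all
    // delegate to PARSER. Assumes "in" is a java.io.InputStream positioned at a serialized
    // CounterGroupProto; parseFrom(InputStream) declares java.io.IOException.
    //
    //   MRProtos.CounterGroupProto group = MRProtos.CounterGroupProto.parseFrom(in);
    //   java.util.List<MRProtos.StringCounterMapProto> entries = group.getCountersList();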

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.CounterGroupProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_CounterGroupProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_CounterGroupProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getCountersFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        name_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        displayName_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        if (countersBuilder_ == null) {
          counters_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
        } else {
          countersBuilder_.clear();
        }
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_CounterGroupProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.name_ = name_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.displayName_ = displayName_;
        if (countersBuilder_ == null) {
          if (((bitField0_ & 0x00000004) == 0x00000004)) {
            counters_ = java.util.Collections.unmodifiableList(counters_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.counters_ = counters_;
        } else {
          result.counters_ = countersBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.getDefaultInstance()) return this;
        if (other.hasName()) {
          bitField0_ |= 0x00000001;
          name_ = other.name_;
          onChanged();
        }
        if (other.hasDisplayName()) {
          bitField0_ |= 0x00000002;
          displayName_ = other.displayName_;
          onChanged();
        }
        if (countersBuilder_ == null) {
          if (!other.counters_.isEmpty()) {
            if (counters_.isEmpty()) {
              counters_ = other.counters_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureCountersIsMutable();
              counters_.addAll(other.counters_);
            }
            onChanged();
          }
        } else {
          if (!other.counters_.isEmpty()) {
            if (countersBuilder_.isEmpty()) {
              countersBuilder_.dispose();
              countersBuilder_ = null;
              counters_ = other.counters_;
              bitField0_ = (bitField0_ & ~0x00000004);
              countersBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getCountersFieldBuilder() : null;
            } else {
              countersBuilder_.addAllMessages(other.counters_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional string name = 1;
      private java.lang.Object name_ = "";
      /**
       * optional string name = 1;
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional string name = 1;
       */
      public java.lang.String getName() {
        java.lang.Object ref = name_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          name_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string name = 1;
       */
      public com.google.protobuf.ByteString
          getNameBytes() {
        java.lang.Object ref = name_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          name_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string name = 1;
       */
      public Builder setName(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        name_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string name = 1;
       */
      public Builder clearName() {
        bitField0_ = (bitField0_ & ~0x00000001);
        name_ = getDefaultInstance().getName();
        onChanged();
        return this;
      }
      /**
       * optional string name = 1;
       */
      public Builder setNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        name_ = value;
        onChanged();
        return this;
      }

      // optional string display_name = 2;
      private java.lang.Object displayName_ = "";
      /**
       * optional string display_name = 2;
       */
      public boolean hasDisplayName() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * optional string display_name = 2;
       */
      public java.lang.String getDisplayName() {
        java.lang.Object ref = displayName_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          displayName_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string display_name = 2;
       */
      public com.google.protobuf.ByteString
          getDisplayNameBytes() {
        java.lang.Object ref = displayName_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          displayName_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string display_name = 2;
       */
      public Builder setDisplayName(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        displayName_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string display_name = 2;
       */
      public Builder clearDisplayName() {
        bitField0_ = (bitField0_ & ~0x00000002);
        displayName_ = getDefaultInstance().getDisplayName();
        onChanged();
        return this;
      }
      /**
       * optional string display_name = 2;
       */
      public Builder setDisplayNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        displayName_ = value;
        onChanged();
        return this;
      }

      // repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
      private java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto> counters_ =
        java.util.Collections.emptyList();
      private void ensureCountersIsMutable() {
        if (!((bitField0_ & 0x00000004) == 0x00000004)) {
          counters_ = new java.util.ArrayList<org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto>(counters_);
          bitField0_ |= 0x00000004;
         }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProtoOrBuilder> countersBuilder_;

      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto> getCountersList() {
        if (countersBuilder_ == null) {
          return java.util.Collections.unmodifiableList(counters_);
        } else {
          return countersBuilder_.getMessageList();
        }
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public int getCountersCount() {
        if (countersBuilder_ == null) {
          return counters_.size();
        } else {
          return countersBuilder_.getCount();
        }
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto getCounters(int index) {
        if (countersBuilder_ == null) {
          return counters_.get(index);
        } else {
          return countersBuilder_.getMessage(index);
        }
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public Builder setCounters(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto value) {
        if (countersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCountersIsMutable();
          counters_.set(index, value);
          onChanged();
        } else {
          countersBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public Builder setCounters(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.Builder builderForValue) {
        if (countersBuilder_ == null) {
          ensureCountersIsMutable();
          counters_.set(index, builderForValue.build());
          onChanged();
        } else {
          countersBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public Builder addCounters(org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto value) {
        if (countersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCountersIsMutable();
          counters_.add(value);
          onChanged();
        } else {
          countersBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public Builder addCounters(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto value) {
        if (countersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCountersIsMutable();
          counters_.add(index, value);
          onChanged();
        } else {
          countersBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public Builder addCounters(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.Builder builderForValue) {
        if (countersBuilder_ == null) {
          ensureCountersIsMutable();
          counters_.add(builderForValue.build());
          onChanged();
        } else {
          countersBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public Builder addCounters(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.Builder builderForValue) {
        if (countersBuilder_ == null) {
          ensureCountersIsMutable();
          counters_.add(index, builderForValue.build());
          onChanged();
        } else {
          countersBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public Builder addAllCounters(
          java.lang.Iterable<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto> values) {
        if (countersBuilder_ == null) {
          ensureCountersIsMutable();
          super.addAll(values, counters_);
          onChanged();
        } else {
          countersBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public Builder clearCounters() {
        if (countersBuilder_ == null) {
          counters_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
          onChanged();
        } else {
          countersBuilder_.clear();
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public Builder removeCounters(int index) {
        if (countersBuilder_ == null) {
          ensureCountersIsMutable();
          counters_.remove(index);
          onChanged();
        } else {
          countersBuilder_.remove(index);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.Builder getCountersBuilder(
          int index) {
        return getCountersFieldBuilder().getBuilder(index);
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProtoOrBuilder getCountersOrBuilder(
          int index) {
        if (countersBuilder_ == null) {
          return counters_.get(index);  } else {
          return countersBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProtoOrBuilder> 
           getCountersOrBuilderList() {
        if (countersBuilder_ != null) {
          return countersBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(counters_);
        }
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.Builder addCountersBuilder() {
        return getCountersFieldBuilder().addBuilder(
            org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.Builder addCountersBuilder(
          int index) {
        return getCountersFieldBuilder().addBuilder(
            index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
       */
      public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.Builder> 
           getCountersBuilderList() {
        return getCountersFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProtoOrBuilder> 
          getCountersFieldBuilder() {
        if (countersBuilder_ == null) {
          countersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProtoOrBuilder>(
                  counters_,
                  ((bitField0_ & 0x00000004) == 0x00000004),
                  getParentForChildren(),
                  isClean());
          counters_ = null;
        }
        return countersBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.CounterGroupProto)
    }

    static {
      defaultInstance = new CounterGroupProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.CounterGroupProto)
  }
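
  // Usage sketch (not part of the generated sources): assembling a CounterGroupProto. The
  // repeated "counters" field holds StringCounterMapProto entries, defined elsewhere in this
  // file; the setKey()/setValue() calls below assume that message's usual key/value layout
  // and are illustrative only.
  //
  //   MRProtos.CounterGroupProto group = MRProtos.CounterGroupProto.newBuilder()
  //       .setName("org.apache.hadoop.mapreduce.TaskCounter")
  //       .setDisplayName("Map-Reduce Framework")
  //       .addCounters(MRProtos.StringCounterMapProto.newBuilder()
  //           .setKey("MAP_INPUT_RECORDS")
  //           .setValue(MRProtos.CounterProto.newBuilder()
  //               .setName("MAP_INPUT_RECORDS")
  //               .setValue(42L)
  //               .build()))
  //       .build();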

  public interface CountersProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
    /**
     * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
     */
    java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto> 
        getCounterGroupsList();
    /**
     * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto getCounterGroups(int index);
    /**
     * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
     */
    int getCounterGroupsCount();
    /**
     * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
     */
    java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProtoOrBuilder> 
        getCounterGroupsOrBuilderList();
    /**
     * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProtoOrBuilder getCounterGroupsOrBuilder(
        int index);
  }
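
  // Structure sketch (not part of the generated sources): CountersProto is the top-level
  // counters container; its repeated "counter_groups" field holds StringCounterGroupMapProto
  // entries, each pairing a group name with a CounterGroupProto (layout assumed from the
  // naming convention of the other *MapProto messages in this file). Assumes "counters" is a
  // CountersProto instance.
  //
  //   for (MRProtos.StringCounterGroupMapProto e : counters.getCounterGroupsList()) {
  //     // e maps one group name to its CounterGroupProto
  //   }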
  /**
   * Protobuf type {@code hadoop.mapreduce.CountersProto}
   */
  public static final class CountersProto extends
      com.google.protobuf.GeneratedMessage
      implements CountersProtoOrBuilder {
    // Use CountersProto.newBuilder() to construct.
    private CountersProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private CountersProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final CountersProto defaultInstance;
    public static CountersProto getDefaultInstance() {
      return defaultInstance;
    }

    public CountersProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private CountersProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                counterGroups_ = new java.util.ArrayList<org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto>();
                mutable_bitField0_ |= 0x00000001;
              }
              counterGroups_.add(input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          counterGroups_ = java.util.Collections.unmodifiableList(counterGroups_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_CountersProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_CountersProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder.class);
    }

    public static com.google.protobuf.Parser<CountersProto> PARSER =
        new com.google.protobuf.AbstractParser<CountersProto>() {
      public CountersProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CountersProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<CountersProto> getParserForType() {
      return PARSER;
    }

    // repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
    public static final int COUNTER_GROUPS_FIELD_NUMBER = 1;
    private java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto> counterGroups_;
    /**
     * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
     */
    public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto> getCounterGroupsList() {
      return counterGroups_;
    }
    /**
     * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
     */
    public java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProtoOrBuilder> 
        getCounterGroupsOrBuilderList() {
      return counterGroups_;
    }
    /**
     * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
     */
    public int getCounterGroupsCount() {
      return counterGroups_.size();
    }
    /**
     * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto getCounterGroups(int index) {
      return counterGroups_.get(index);
    }
    /**
     * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProtoOrBuilder getCounterGroupsOrBuilder(
        int index) {
      return counterGroups_.get(index);
    }

    private void initFields() {
      counterGroups_ = java.util.Collections.emptyList();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < counterGroups_.size(); i++) {
        output.writeMessage(1, counterGroups_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < counterGroups_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, counterGroups_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto other = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto) obj;

      boolean result = true;
      result = result && getCounterGroupsList()
          .equals(other.getCounterGroupsList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getCounterGroupsCount() > 0) {
        hash = (37 * hash) + COUNTER_GROUPS_FIELD_NUMBER;
        hash = (53 * hash) + getCounterGroupsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.CountersProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_CountersProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_CountersProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getCounterGroupsFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (counterGroupsBuilder_ == null) {
          counterGroups_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          counterGroupsBuilder_.clear();
        }
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_CountersProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto(this);
        int from_bitField0_ = bitField0_;
        if (counterGroupsBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            counterGroups_ = java.util.Collections.unmodifiableList(counterGroups_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.counterGroups_ = counterGroups_;
        } else {
          result.counterGroups_ = counterGroupsBuilder_.build();
        }
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance()) return this;
        if (counterGroupsBuilder_ == null) {
          if (!other.counterGroups_.isEmpty()) {
            if (counterGroups_.isEmpty()) {
              counterGroups_ = other.counterGroups_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureCounterGroupsIsMutable();
              counterGroups_.addAll(other.counterGroups_);
            }
            onChanged();
          }
        } else {
          if (!other.counterGroups_.isEmpty()) {
            if (counterGroupsBuilder_.isEmpty()) {
              counterGroupsBuilder_.dispose();
              counterGroupsBuilder_ = null;
              counterGroups_ = other.counterGroups_;
              bitField0_ = (bitField0_ & ~0x00000001);
              counterGroupsBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getCounterGroupsFieldBuilder() : null;
            } else {
              counterGroupsBuilder_.addAllMessages(other.counterGroups_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
      private java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto> counterGroups_ =
        java.util.Collections.emptyList();
      private void ensureCounterGroupsIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          counterGroups_ = new java.util.ArrayList<org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto>(counterGroups_);
          bitField0_ |= 0x00000001;
         }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProtoOrBuilder> counterGroupsBuilder_;

      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto> getCounterGroupsList() {
        if (counterGroupsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(counterGroups_);
        } else {
          return counterGroupsBuilder_.getMessageList();
        }
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public int getCounterGroupsCount() {
        if (counterGroupsBuilder_ == null) {
          return counterGroups_.size();
        } else {
          return counterGroupsBuilder_.getCount();
        }
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto getCounterGroups(int index) {
        if (counterGroupsBuilder_ == null) {
          return counterGroups_.get(index);
        } else {
          return counterGroupsBuilder_.getMessage(index);
        }
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public Builder setCounterGroups(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto value) {
        if (counterGroupsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCounterGroupsIsMutable();
          counterGroups_.set(index, value);
          onChanged();
        } else {
          counterGroupsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public Builder setCounterGroups(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.Builder builderForValue) {
        if (counterGroupsBuilder_ == null) {
          ensureCounterGroupsIsMutable();
          counterGroups_.set(index, builderForValue.build());
          onChanged();
        } else {
          counterGroupsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public Builder addCounterGroups(org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto value) {
        if (counterGroupsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCounterGroupsIsMutable();
          counterGroups_.add(value);
          onChanged();
        } else {
          counterGroupsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public Builder addCounterGroups(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto value) {
        if (counterGroupsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCounterGroupsIsMutable();
          counterGroups_.add(index, value);
          onChanged();
        } else {
          counterGroupsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public Builder addCounterGroups(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.Builder builderForValue) {
        if (counterGroupsBuilder_ == null) {
          ensureCounterGroupsIsMutable();
          counterGroups_.add(builderForValue.build());
          onChanged();
        } else {
          counterGroupsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public Builder addCounterGroups(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.Builder builderForValue) {
        if (counterGroupsBuilder_ == null) {
          ensureCounterGroupsIsMutable();
          counterGroups_.add(index, builderForValue.build());
          onChanged();
        } else {
          counterGroupsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public Builder addAllCounterGroups(
          java.lang.Iterable<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto> values) {
        if (counterGroupsBuilder_ == null) {
          ensureCounterGroupsIsMutable();
          super.addAll(values, counterGroups_);
          onChanged();
        } else {
          counterGroupsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public Builder clearCounterGroups() {
        if (counterGroupsBuilder_ == null) {
          counterGroups_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          counterGroupsBuilder_.clear();
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public Builder removeCounterGroups(int index) {
        if (counterGroupsBuilder_ == null) {
          ensureCounterGroupsIsMutable();
          counterGroups_.remove(index);
          onChanged();
        } else {
          counterGroupsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.Builder getCounterGroupsBuilder(
          int index) {
        return getCounterGroupsFieldBuilder().getBuilder(index);
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProtoOrBuilder getCounterGroupsOrBuilder(
          int index) {
        if (counterGroupsBuilder_ == null) {
          return counterGroups_.get(index);  } else {
          return counterGroupsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProtoOrBuilder> 
           getCounterGroupsOrBuilderList() {
        if (counterGroupsBuilder_ != null) {
          return counterGroupsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(counterGroups_);
        }
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.Builder addCounterGroupsBuilder() {
        return getCounterGroupsFieldBuilder().addBuilder(
            org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.Builder addCounterGroupsBuilder(
          int index) {
        return getCounterGroupsFieldBuilder().addBuilder(
            index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
       */
      public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.Builder> 
           getCounterGroupsBuilderList() {
        return getCounterGroupsFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProtoOrBuilder> 
          getCounterGroupsFieldBuilder() {
        if (counterGroupsBuilder_ == null) {
          counterGroupsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProtoOrBuilder>(
                  counterGroups_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          counterGroups_ = null;
        }
        return counterGroupsBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.CountersProto)
    }

    static {
      defaultInstance = new CountersProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.CountersProto)
  }
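  /*
   * Usage sketch for the CountersProto builder API above. Only calls that this
   * generated class exposes are used; the empty StringCounterGroupMapProto added
   * here is a placeholder value, not a meaningful counter group.
   *
   *   org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto counters =
   *       org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.newBuilder()
   *           .addCounterGroups(
   *               org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto
   *                   .getDefaultInstance())
   *           .build();
   *   int groupCount = counters.getCounterGroupsCount();
   */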

  public interface TaskReportProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.TaskIdProto task_id = 1;
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    boolean hasTaskId();
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId();
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder();

    // optional .hadoop.mapreduce.TaskStateProto task_state = 2;
    /**
     * optional .hadoop.mapreduce.TaskStateProto task_state = 2;
     */
    boolean hasTaskState();
    /**
     * optional .hadoop.mapreduce.TaskStateProto task_state = 2;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskStateProto getTaskState();

    // optional float progress = 3;
    /**
     * optional float progress = 3;
     */
    boolean hasProgress();
    /**
     * optional float progress = 3;
     */
    float getProgress();

    // optional int64 start_time = 4;
    /**
     * optional int64 start_time = 4;
     */
    boolean hasStartTime();
    /**
     * optional int64 start_time = 4;
     */
    long getStartTime();

    // optional int64 finish_time = 5;
    /**
     * optional int64 finish_time = 5;
     */
    boolean hasFinishTime();
    /**
     * optional int64 finish_time = 5;
     */
    long getFinishTime();

    // optional .hadoop.mapreduce.CountersProto counters = 6;
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 6;
     */
    boolean hasCounters();
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 6;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto getCounters();
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 6;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder getCountersOrBuilder();

    // repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
    /**
     * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
     */
    java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto> 
        getRunningAttemptsList();
    /**
     * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getRunningAttempts(int index);
    /**
     * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
     */
    int getRunningAttemptsCount();
    /**
     * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
     */
    java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> 
        getRunningAttemptsOrBuilderList();
    /**
     * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getRunningAttemptsOrBuilder(
        int index);

    // optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
     */
    boolean hasSuccessfulAttempt();
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getSuccessfulAttempt();
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getSuccessfulAttemptOrBuilder();

    // repeated string diagnostics = 9;
    /**
     * repeated string diagnostics = 9;
     */
    java.util.List<java.lang.String>
    getDiagnosticsList();
    /**
     * repeated string diagnostics = 9;
     */
    int getDiagnosticsCount();
    /**
     * repeated string diagnostics = 9;
     */
    java.lang.String getDiagnostics(int index);
    /**
     * repeated string diagnostics = 9;
     */
    com.google.protobuf.ByteString
        getDiagnosticsBytes(int index);
  }
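  /*
   * Read-side sketch for the TaskReportProtoOrBuilder interface above. Both
   * TaskReportProto and TaskReportProto.Builder implement it, so an accessor-only
   * helper like the hypothetical logReport below works with either.
   *
   *   static void logReport(
   *       org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder report) {
   *     if (report.hasTaskState()) {
   *       System.out.println("state=" + report.getTaskState()
   *           + " progress=" + report.getProgress());
   *     }
   *     for (int i = 0; i < report.getDiagnosticsCount(); i++) {
   *       System.out.println("diagnostic: " + report.getDiagnostics(i));
   *     }
   *   }
   */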
  /**
   * Protobuf type {@code hadoop.mapreduce.TaskReportProto}
   */
  public static final class TaskReportProto extends
      com.google.protobuf.GeneratedMessage
      implements TaskReportProtoOrBuilder {
    // Use TaskReportProto.newBuilder() to construct.
    private TaskReportProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private TaskReportProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final TaskReportProto defaultInstance;
    public static TaskReportProto getDefaultInstance() {
      return defaultInstance;
    }

    public TaskReportProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private TaskReportProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = taskId_.toBuilder();
              }
              taskId_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(taskId_);
                taskId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              int rawValue = input.readEnum();
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskStateProto value = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskStateProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(2, rawValue);
              } else {
                bitField0_ |= 0x00000002;
                taskState_ = value;
              }
              break;
            }
            case 29: {
              bitField0_ |= 0x00000004;
              progress_ = input.readFloat();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              startTime_ = input.readInt64();
              break;
            }
            case 40: {
              bitField0_ |= 0x00000010;
              finishTime_ = input.readInt64();
              break;
            }
            case 50: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000020) == 0x00000020)) {
                subBuilder = counters_.toBuilder();
              }
              counters_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(counters_);
                counters_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000020;
              break;
            }
            case 58: {
              if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
                runningAttempts_ = new java.util.ArrayList<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto>();
                mutable_bitField0_ |= 0x00000040;
              }
              runningAttempts_.add(input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.PARSER, extensionRegistry));
              break;
            }
            case 66: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000040) == 0x00000040)) {
                subBuilder = successfulAttempt_.toBuilder();
              }
              successfulAttempt_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(successfulAttempt_);
                successfulAttempt_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000040;
              break;
            }
            case 74: {
              if (!((mutable_bitField0_ & 0x00000100) == 0x00000100)) {
                diagnostics_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000100;
              }
              diagnostics_.add(input.readBytes());
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
          runningAttempts_ = java.util.Collections.unmodifiableList(runningAttempts_);
        }
        if (((mutable_bitField0_ & 0x00000100) == 0x00000100)) {
          diagnostics_ = new com.google.protobuf.UnmodifiableLazyStringList(diagnostics_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskReportProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskReportProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder.class);
    }

    public static com.google.protobuf.Parser<TaskReportProto> PARSER =
        new com.google.protobuf.AbstractParser<TaskReportProto>() {
      public TaskReportProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new TaskReportProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<TaskReportProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.TaskIdProto task_id = 1;
    public static final int TASK_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto taskId_;
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    public boolean hasTaskId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId() {
      return taskId_;
    }
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder() {
      return taskId_;
    }

    // optional .hadoop.mapreduce.TaskStateProto task_state = 2;
    public static final int TASK_STATE_FIELD_NUMBER = 2;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskStateProto taskState_;
    /**
     * optional .hadoop.mapreduce.TaskStateProto task_state = 2;
     */
    public boolean hasTaskState() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * optional .hadoop.mapreduce.TaskStateProto task_state = 2;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskStateProto getTaskState() {
      return taskState_;
    }

    // optional float progress = 3;
    public static final int PROGRESS_FIELD_NUMBER = 3;
    private float progress_;
    /**
     * optional float progress = 3;
     */
    public boolean hasProgress() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * optional float progress = 3;
     */
    public float getProgress() {
      return progress_;
    }

    // optional int64 start_time = 4;
    public static final int START_TIME_FIELD_NUMBER = 4;
    private long startTime_;
    /**
     * optional int64 start_time = 4;
     */
    public boolean hasStartTime() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * optional int64 start_time = 4;
     */
    public long getStartTime() {
      return startTime_;
    }

    // optional int64 finish_time = 5;
    public static final int FINISH_TIME_FIELD_NUMBER = 5;
    private long finishTime_;
    /**
     * optional int64 finish_time = 5;
     */
    public boolean hasFinishTime() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * optional int64 finish_time = 5;
     */
    public long getFinishTime() {
      return finishTime_;
    }

    // optional .hadoop.mapreduce.CountersProto counters = 6;
    public static final int COUNTERS_FIELD_NUMBER = 6;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto counters_;
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 6;
     */
    public boolean hasCounters() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 6;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto getCounters() {
      return counters_;
    }
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 6;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder getCountersOrBuilder() {
      return counters_;
    }

    // repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
    public static final int RUNNING_ATTEMPTS_FIELD_NUMBER = 7;
    private java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto> runningAttempts_;
    /**
     * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
     */
    public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto> getRunningAttemptsList() {
      return runningAttempts_;
    }
    /**
     * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
     */
    public java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> 
        getRunningAttemptsOrBuilderList() {
      return runningAttempts_;
    }
    /**
     * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
     */
    public int getRunningAttemptsCount() {
      return runningAttempts_.size();
    }
    /**
     * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getRunningAttempts(int index) {
      return runningAttempts_.get(index);
    }
    /**
     * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getRunningAttemptsOrBuilder(
        int index) {
      return runningAttempts_.get(index);
    }

    // optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
    public static final int SUCCESSFUL_ATTEMPT_FIELD_NUMBER = 8;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto successfulAttempt_;
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
     */
    public boolean hasSuccessfulAttempt() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getSuccessfulAttempt() {
      return successfulAttempt_;
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getSuccessfulAttemptOrBuilder() {
      return successfulAttempt_;
    }

    // repeated string diagnostics = 9;
    public static final int DIAGNOSTICS_FIELD_NUMBER = 9;
    private com.google.protobuf.LazyStringList diagnostics_;
    /**
     * repeated string diagnostics = 9;
     */
    public java.util.List<java.lang.String>
        getDiagnosticsList() {
      return diagnostics_;
    }
    /**
     * repeated string diagnostics = 9;
     */
    public int getDiagnosticsCount() {
      return diagnostics_.size();
    }
    /**
     * repeated string diagnostics = 9;
     */
    public java.lang.String getDiagnostics(int index) {
      return diagnostics_.get(index);
    }
    /**
     * repeated string diagnostics = 9;
     */
    public com.google.protobuf.ByteString
        getDiagnosticsBytes(int index) {
      return diagnostics_.getByteString(index);
    }

    private void initFields() {
      taskId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance();
      taskState_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskStateProto.TS_NEW;
      progress_ = 0F;
      startTime_ = 0L;
      finishTime_ = 0L;
      counters_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance();
      runningAttempts_ = java.util.Collections.emptyList();
      successfulAttempt_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
      diagnostics_ = com.google.protobuf.LazyStringArrayList.EMPTY;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, taskId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeEnum(2, taskState_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeFloat(3, progress_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeInt64(4, startTime_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeInt64(5, finishTime_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeMessage(6, counters_);
      }
      for (int i = 0; i < runningAttempts_.size(); i++) {
        output.writeMessage(7, runningAttempts_.get(i));
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeMessage(8, successfulAttempt_);
      }
      for (int i = 0; i < diagnostics_.size(); i++) {
        output.writeBytes(9, diagnostics_.getByteString(i));
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, taskId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(2, taskState_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeFloatSize(3, progress_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(4, startTime_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(5, finishTime_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(6, counters_);
      }
      for (int i = 0; i < runningAttempts_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(7, runningAttempts_.get(i));
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(8, successfulAttempt_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < diagnostics_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(diagnostics_.getByteString(i));
        }
        size += dataSize;
        size += 1 * getDiagnosticsList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto other = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto) obj;

      boolean result = true;
      result = result && (hasTaskId() == other.hasTaskId());
      if (hasTaskId()) {
        result = result && getTaskId()
            .equals(other.getTaskId());
      }
      result = result && (hasTaskState() == other.hasTaskState());
      if (hasTaskState()) {
        result = result &&
            (getTaskState() == other.getTaskState());
      }
      result = result && (hasProgress() == other.hasProgress());
      if (hasProgress()) {
        result = result && (Float.floatToIntBits(getProgress())    == Float.floatToIntBits(other.getProgress()));
      }
      result = result && (hasStartTime() == other.hasStartTime());
      if (hasStartTime()) {
        result = result && (getStartTime()
            == other.getStartTime());
      }
      result = result && (hasFinishTime() == other.hasFinishTime());
      if (hasFinishTime()) {
        result = result && (getFinishTime()
            == other.getFinishTime());
      }
      result = result && (hasCounters() == other.hasCounters());
      if (hasCounters()) {
        result = result && getCounters()
            .equals(other.getCounters());
      }
      result = result && getRunningAttemptsList()
          .equals(other.getRunningAttemptsList());
      result = result && (hasSuccessfulAttempt() == other.hasSuccessfulAttempt());
      if (hasSuccessfulAttempt()) {
        result = result && getSuccessfulAttempt()
            .equals(other.getSuccessfulAttempt());
      }
      result = result && getDiagnosticsList()
          .equals(other.getDiagnosticsList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTaskId()) {
        hash = (37 * hash) + TASK_ID_FIELD_NUMBER;
        hash = (53 * hash) + getTaskId().hashCode();
      }
      if (hasTaskState()) {
        hash = (37 * hash) + TASK_STATE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getTaskState());
      }
      if (hasProgress()) {
        hash = (37 * hash) + PROGRESS_FIELD_NUMBER;
        hash = (53 * hash) + Float.floatToIntBits(
            getProgress());
      }
      if (hasStartTime()) {
        hash = (37 * hash) + START_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getStartTime());
      }
      if (hasFinishTime()) {
        hash = (37 * hash) + FINISH_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getFinishTime());
      }
      if (hasCounters()) {
        hash = (37 * hash) + COUNTERS_FIELD_NUMBER;
        hash = (53 * hash) + getCounters().hashCode();
      }
      if (getRunningAttemptsCount() > 0) {
        hash = (37 * hash) + RUNNING_ATTEMPTS_FIELD_NUMBER;
        hash = (53 * hash) + getRunningAttemptsList().hashCode();
      }
      if (hasSuccessfulAttempt()) {
        hash = (37 * hash) + SUCCESSFUL_ATTEMPT_FIELD_NUMBER;
        hash = (53 * hash) + getSuccessfulAttempt().hashCode();
      }
      if (getDiagnosticsCount() > 0) {
        hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER;
        hash = (53 * hash) + getDiagnosticsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
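    /*
     * Round-trip sketch using the parse methods above together with toBuilder();
     * "report" is assumed to be an existing TaskReportProto instance, and the
     * mutation values are illustrative.
     *
     *   byte[] bytes = report.toByteArray();
     *   org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto parsed =
     *       org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.parseFrom(bytes);
     *   org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto updated =
     *       parsed.toBuilder()
     *           .setProgress(1.0f)
     *           .setTaskState(
     *               org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskStateProto.TS_SUCCEEDED)
     *           .build();
     */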

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.TaskReportProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskReportProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskReportProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTaskIdFieldBuilder();
          getCountersFieldBuilder();
          getRunningAttemptsFieldBuilder();
          getSuccessfulAttemptFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (taskIdBuilder_ == null) {
          taskId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance();
        } else {
          taskIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        taskState_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskStateProto.TS_NEW;
        bitField0_ = (bitField0_ & ~0x00000002);
        progress_ = 0F;
        bitField0_ = (bitField0_ & ~0x00000004);
        startTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        finishTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000010);
        if (countersBuilder_ == null) {
          counters_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance();
        } else {
          countersBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        if (runningAttemptsBuilder_ == null) {
          runningAttempts_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000040);
        } else {
          runningAttemptsBuilder_.clear();
        }
        if (successfulAttemptBuilder_ == null) {
          successfulAttempt_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
        } else {
          successfulAttemptBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000080);
        diagnostics_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000100);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskReportProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (taskIdBuilder_ == null) {
          result.taskId_ = taskId_;
        } else {
          result.taskId_ = taskIdBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.taskState_ = taskState_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.progress_ = progress_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.startTime_ = startTime_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.finishTime_ = finishTime_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        if (countersBuilder_ == null) {
          result.counters_ = counters_;
        } else {
          result.counters_ = countersBuilder_.build();
        }
        if (runningAttemptsBuilder_ == null) {
          if (((bitField0_ & 0x00000040) == 0x00000040)) {
            runningAttempts_ = java.util.Collections.unmodifiableList(runningAttempts_);
            bitField0_ = (bitField0_ & ~0x00000040);
          }
          result.runningAttempts_ = runningAttempts_;
        } else {
          result.runningAttempts_ = runningAttemptsBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
          to_bitField0_ |= 0x00000040;
        }
        if (successfulAttemptBuilder_ == null) {
          result.successfulAttempt_ = successfulAttempt_;
        } else {
          result.successfulAttempt_ = successfulAttemptBuilder_.build();
        }
        if (((bitField0_ & 0x00000100) == 0x00000100)) {
          diagnostics_ = new com.google.protobuf.UnmodifiableLazyStringList(
              diagnostics_);
          bitField0_ = (bitField0_ & ~0x00000100);
        }
        result.diagnostics_ = diagnostics_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance()) return this;
        if (other.hasTaskId()) {
          mergeTaskId(other.getTaskId());
        }
        if (other.hasTaskState()) {
          setTaskState(other.getTaskState());
        }
        if (other.hasProgress()) {
          setProgress(other.getProgress());
        }
        if (other.hasStartTime()) {
          setStartTime(other.getStartTime());
        }
        if (other.hasFinishTime()) {
          setFinishTime(other.getFinishTime());
        }
        if (other.hasCounters()) {
          mergeCounters(other.getCounters());
        }
        if (runningAttemptsBuilder_ == null) {
          if (!other.runningAttempts_.isEmpty()) {
            if (runningAttempts_.isEmpty()) {
              runningAttempts_ = other.runningAttempts_;
              bitField0_ = (bitField0_ & ~0x00000040);
            } else {
              ensureRunningAttemptsIsMutable();
              runningAttempts_.addAll(other.runningAttempts_);
            }
            onChanged();
          }
        } else {
          if (!other.runningAttempts_.isEmpty()) {
            if (runningAttemptsBuilder_.isEmpty()) {
              runningAttemptsBuilder_.dispose();
              runningAttemptsBuilder_ = null;
              runningAttempts_ = other.runningAttempts_;
              bitField0_ = (bitField0_ & ~0x00000040);
              runningAttemptsBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getRunningAttemptsFieldBuilder() : null;
            } else {
              runningAttemptsBuilder_.addAllMessages(other.runningAttempts_);
            }
          }
        }
        if (other.hasSuccessfulAttempt()) {
          mergeSuccessfulAttempt(other.getSuccessfulAttempt());
        }
        if (!other.diagnostics_.isEmpty()) {
          if (diagnostics_.isEmpty()) {
            diagnostics_ = other.diagnostics_;
            bitField0_ = (bitField0_ & ~0x00000100);
          } else {
            ensureDiagnosticsIsMutable();
            diagnostics_.addAll(other.diagnostics_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.TaskIdProto task_id = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto taskId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder> taskIdBuilder_;
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public boolean hasTaskId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId() {
        if (taskIdBuilder_ == null) {
          return taskId_;
        } else {
          return taskIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public Builder setTaskId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto value) {
        if (taskIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          taskId_ = value;
          onChanged();
        } else {
          taskIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public Builder setTaskId(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder builderForValue) {
        if (taskIdBuilder_ == null) {
          taskId_ = builderForValue.build();
          onChanged();
        } else {
          taskIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public Builder mergeTaskId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto value) {
        if (taskIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              taskId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance()) {
            taskId_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.newBuilder(taskId_).mergeFrom(value).buildPartial();
          } else {
            taskId_ = value;
          }
          onChanged();
        } else {
          taskIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public Builder clearTaskId() {
        if (taskIdBuilder_ == null) {
          taskId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance();
          onChanged();
        } else {
          taskIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder getTaskIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getTaskIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder() {
        if (taskIdBuilder_ != null) {
          return taskIdBuilder_.getMessageOrBuilder();
        } else {
          return taskId_;
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder> 
          getTaskIdFieldBuilder() {
        if (taskIdBuilder_ == null) {
          taskIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder>(
                  taskId_,
                  getParentForChildren(),
                  isClean());
          taskId_ = null;
        }
        return taskIdBuilder_;
      }

      // optional .hadoop.mapreduce.TaskStateProto task_state = 2;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskStateProto taskState_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskStateProto.TS_NEW;
      /**
       * optional .hadoop.mapreduce.TaskStateProto task_state = 2;
       */
      public boolean hasTaskState() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * optional .hadoop.mapreduce.TaskStateProto task_state = 2;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskStateProto getTaskState() {
        return taskState_;
      }
      /**
       * optional .hadoop.mapreduce.TaskStateProto task_state = 2;
       */
      public Builder setTaskState(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        taskState_ = value;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskStateProto task_state = 2;
       */
      public Builder clearTaskState() {
        bitField0_ = (bitField0_ & ~0x00000002);
        taskState_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskStateProto.TS_NEW;
        onChanged();
        return this;
      }

      // optional float progress = 3;
      private float progress_ ;
      /**
       * optional float progress = 3;
       */
      public boolean hasProgress() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * optional float progress = 3;
       */
      public float getProgress() {
        return progress_;
      }
      /**
       * optional float progress = 3;
       */
      public Builder setProgress(float value) {
        bitField0_ |= 0x00000004;
        progress_ = value;
        onChanged();
        return this;
      }
      /**
       * optional float progress = 3;
       */
      public Builder clearProgress() {
        bitField0_ = (bitField0_ & ~0x00000004);
        progress_ = 0F;
        onChanged();
        return this;
      }

      // optional int64 start_time = 4;
      private long startTime_ ;
      /**
       * optional int64 start_time = 4;
       */
      public boolean hasStartTime() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * optional int64 start_time = 4;
       */
      public long getStartTime() {
        return startTime_;
      }
      /**
       * optional int64 start_time = 4;
       */
      public Builder setStartTime(long value) {
        bitField0_ |= 0x00000008;
        startTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 start_time = 4;
       */
      public Builder clearStartTime() {
        bitField0_ = (bitField0_ & ~0x00000008);
        startTime_ = 0L;
        onChanged();
        return this;
      }

      // optional int64 finish_time = 5;
      private long finishTime_ ;
      /**
       * optional int64 finish_time = 5;
       */
      public boolean hasFinishTime() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * optional int64 finish_time = 5;
       */
      public long getFinishTime() {
        return finishTime_;
      }
      /**
       * optional int64 finish_time = 5;
       */
      public Builder setFinishTime(long value) {
        bitField0_ |= 0x00000010;
        finishTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 finish_time = 5;
       */
      public Builder clearFinishTime() {
        bitField0_ = (bitField0_ & ~0x00000010);
        finishTime_ = 0L;
        onChanged();
        return this;
      }

      // optional .hadoop.mapreduce.CountersProto counters = 6;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto counters_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder> countersBuilder_;
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      public boolean hasCounters() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto getCounters() {
        if (countersBuilder_ == null) {
          return counters_;
        } else {
          return countersBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      public Builder setCounters(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto value) {
        if (countersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          counters_ = value;
          onChanged();
        } else {
          countersBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      public Builder setCounters(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder builderForValue) {
        if (countersBuilder_ == null) {
          counters_ = builderForValue.build();
          onChanged();
        } else {
          countersBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      public Builder mergeCounters(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto value) {
        if (countersBuilder_ == null) {
          if (((bitField0_ & 0x00000020) == 0x00000020) &&
              counters_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance()) {
            counters_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.newBuilder(counters_).mergeFrom(value).buildPartial();
          } else {
            counters_ = value;
          }
          onChanged();
        } else {
          countersBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      public Builder clearCounters() {
        if (countersBuilder_ == null) {
          counters_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance();
          onChanged();
        } else {
          countersBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder getCountersBuilder() {
        bitField0_ |= 0x00000020;
        onChanged();
        return getCountersFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder getCountersOrBuilder() {
        if (countersBuilder_ != null) {
          return countersBuilder_.getMessageOrBuilder();
        } else {
          return counters_;
        }
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder> 
          getCountersFieldBuilder() {
        if (countersBuilder_ == null) {
          countersBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder>(
                  counters_,
                  getParentForChildren(),
                  isClean());
          counters_ = null;
        }
        return countersBuilder_;
      }
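      // Editor's note (illustrative commentary, not part of the generated output):
      // the counters field follows the lazy single-field-builder pattern used
      // throughout this file. The Builder stores the value directly in counters_
      // until getCountersBuilder() forces creation of countersBuilder_; after that
      // point countersBuilder_ owns the value, counters_ is nulled out, and every
      // accessor above branches on countersBuilder_ == null to pick the live copy.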

      // repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
      private java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto> runningAttempts_ =
        java.util.Collections.emptyList();
      private void ensureRunningAttemptsIsMutable() {
        if (!((bitField0_ & 0x00000040) == 0x00000040)) {
          runningAttempts_ = new java.util.ArrayList<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto>(runningAttempts_);
          bitField0_ |= 0x00000040;
        }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> runningAttemptsBuilder_;

      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto> getRunningAttemptsList() {
        if (runningAttemptsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(runningAttempts_);
        } else {
          return runningAttemptsBuilder_.getMessageList();
        }
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public int getRunningAttemptsCount() {
        if (runningAttemptsBuilder_ == null) {
          return runningAttempts_.size();
        } else {
          return runningAttemptsBuilder_.getCount();
        }
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getRunningAttempts(int index) {
        if (runningAttemptsBuilder_ == null) {
          return runningAttempts_.get(index);
        } else {
          return runningAttemptsBuilder_.getMessage(index);
        }
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public Builder setRunningAttempts(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
        if (runningAttemptsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRunningAttemptsIsMutable();
          runningAttempts_.set(index, value);
          onChanged();
        } else {
          runningAttemptsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public Builder setRunningAttempts(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder builderForValue) {
        if (runningAttemptsBuilder_ == null) {
          ensureRunningAttemptsIsMutable();
          runningAttempts_.set(index, builderForValue.build());
          onChanged();
        } else {
          runningAttemptsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public Builder addRunningAttempts(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
        if (runningAttemptsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRunningAttemptsIsMutable();
          runningAttempts_.add(value);
          onChanged();
        } else {
          runningAttemptsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public Builder addRunningAttempts(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
        if (runningAttemptsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRunningAttemptsIsMutable();
          runningAttempts_.add(index, value);
          onChanged();
        } else {
          runningAttemptsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public Builder addRunningAttempts(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder builderForValue) {
        if (runningAttemptsBuilder_ == null) {
          ensureRunningAttemptsIsMutable();
          runningAttempts_.add(builderForValue.build());
          onChanged();
        } else {
          runningAttemptsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public Builder addRunningAttempts(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder builderForValue) {
        if (runningAttemptsBuilder_ == null) {
          ensureRunningAttemptsIsMutable();
          runningAttempts_.add(index, builderForValue.build());
          onChanged();
        } else {
          runningAttemptsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public Builder addAllRunningAttempts(
          java.lang.Iterable<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto> values) {
        if (runningAttemptsBuilder_ == null) {
          ensureRunningAttemptsIsMutable();
          super.addAll(values, runningAttempts_);
          onChanged();
        } else {
          runningAttemptsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public Builder clearRunningAttempts() {
        if (runningAttemptsBuilder_ == null) {
          runningAttempts_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000040);
          onChanged();
        } else {
          runningAttemptsBuilder_.clear();
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public Builder removeRunningAttempts(int index) {
        if (runningAttemptsBuilder_ == null) {
          ensureRunningAttemptsIsMutable();
          runningAttempts_.remove(index);
          onChanged();
        } else {
          runningAttemptsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder getRunningAttemptsBuilder(
          int index) {
        return getRunningAttemptsFieldBuilder().getBuilder(index);
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getRunningAttemptsOrBuilder(
          int index) {
        if (runningAttemptsBuilder_ == null) {
          return runningAttempts_.get(index);
        } else {
          return runningAttemptsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder>
           getRunningAttemptsOrBuilderList() {
        if (runningAttemptsBuilder_ != null) {
          return runningAttemptsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(runningAttempts_);
        }
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder addRunningAttemptsBuilder() {
        return getRunningAttemptsFieldBuilder().addBuilder(
            org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder addRunningAttemptsBuilder(
          int index) {
        return getRunningAttemptsFieldBuilder().addBuilder(
            index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
       */
      public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder>
           getRunningAttemptsBuilderList() {
        return getRunningAttemptsFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> 
          getRunningAttemptsFieldBuilder() {
        if (runningAttemptsBuilder_ == null) {
          runningAttemptsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder>(
                  runningAttempts_,
                  ((bitField0_ & 0x00000040) == 0x00000040),
                  getParentForChildren(),
                  isClean());
          runningAttempts_ = null;
        }
        return runningAttemptsBuilder_;
      }
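      // Editor's note (illustrative commentary, not part of the generated output):
      // the repeated running_attempts field mirrors the single-field pattern with a
      // RepeatedFieldBuilder. A minimal usage sketch, assuming the standard
      // generated newBuilder()/build() entry points (not shown in this excerpt):
      //
      //   MRProtos.TaskReportProto report = MRProtos.TaskReportProto.newBuilder()
      //       .addRunningAttempts(MRProtos.TaskAttemptIdProto.getDefaultInstance())
      //       .build();
      //
      // Added elements are copied into the mutable runningAttempts_ list until a
      // builder view is requested, after which runningAttemptsBuilder_ takes over.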

      // optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto successfulAttempt_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> successfulAttemptBuilder_;
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
       */
      public boolean hasSuccessfulAttempt() {
        return ((bitField0_ & 0x00000080) == 0x00000080);
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getSuccessfulAttempt() {
        if (successfulAttemptBuilder_ == null) {
          return successfulAttempt_;
        } else {
          return successfulAttemptBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
       */
      public Builder setSuccessfulAttempt(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
        if (successfulAttemptBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          successfulAttempt_ = value;
          onChanged();
        } else {
          successfulAttemptBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000080;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
       */
      public Builder setSuccessfulAttempt(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder builderForValue) {
        if (successfulAttemptBuilder_ == null) {
          successfulAttempt_ = builderForValue.build();
          onChanged();
        } else {
          successfulAttemptBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000080;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
       */
      public Builder mergeSuccessfulAttempt(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
        if (successfulAttemptBuilder_ == null) {
          if (((bitField0_ & 0x00000080) == 0x00000080) &&
              successfulAttempt_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance()) {
            successfulAttempt_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.newBuilder(successfulAttempt_).mergeFrom(value).buildPartial();
          } else {
            successfulAttempt_ = value;
          }
          onChanged();
        } else {
          successfulAttemptBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000080;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
       */
      public Builder clearSuccessfulAttempt() {
        if (successfulAttemptBuilder_ == null) {
          successfulAttempt_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
          onChanged();
        } else {
          successfulAttemptBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000080);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder getSuccessfulAttemptBuilder() {
        bitField0_ |= 0x00000080;
        onChanged();
        return getSuccessfulAttemptFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getSuccessfulAttemptOrBuilder() {
        if (successfulAttemptBuilder_ != null) {
          return successfulAttemptBuilder_.getMessageOrBuilder();
        } else {
          return successfulAttempt_;
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> 
          getSuccessfulAttemptFieldBuilder() {
        if (successfulAttemptBuilder_ == null) {
          successfulAttemptBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder>(
                  successfulAttempt_,
                  getParentForChildren(),
                  isClean());
          successfulAttempt_ = null;
        }
        return successfulAttemptBuilder_;
      }

      // repeated string diagnostics = 9;
      private com.google.protobuf.LazyStringList diagnostics_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      private void ensureDiagnosticsIsMutable() {
        if (!((bitField0_ & 0x00000100) == 0x00000100)) {
          diagnostics_ = new com.google.protobuf.LazyStringArrayList(diagnostics_);
          bitField0_ |= 0x00000100;
        }
      }
      /**
       * repeated string diagnostics = 9;
       */
      public java.util.List<java.lang.String>
          getDiagnosticsList() {
        return java.util.Collections.unmodifiableList(diagnostics_);
      }
      /**
       * repeated string diagnostics = 9;
       */
      public int getDiagnosticsCount() {
        return diagnostics_.size();
      }
      /**
       * repeated string diagnostics = 9;
       */
      public java.lang.String getDiagnostics(int index) {
        return diagnostics_.get(index);
      }
      /**
       * repeated string diagnostics = 9;
       */
      public com.google.protobuf.ByteString
          getDiagnosticsBytes(int index) {
        return diagnostics_.getByteString(index);
      }
      /**
       * repeated string diagnostics = 9;
       */
      public Builder setDiagnostics(
          int index, java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDiagnosticsIsMutable();
        diagnostics_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * repeated string diagnostics = 9;
       */
      public Builder addDiagnostics(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDiagnosticsIsMutable();
        diagnostics_.add(value);
        onChanged();
        return this;
      }
      /**
       * repeated string diagnostics = 9;
       */
      public Builder addAllDiagnostics(
          java.lang.Iterable<java.lang.String> values) {
        ensureDiagnosticsIsMutable();
        super.addAll(values, diagnostics_);
        onChanged();
        return this;
      }
      /**
       * repeated string diagnostics = 9;
       */
      public Builder clearDiagnostics() {
        diagnostics_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000100);
        onChanged();
        return this;
      }
      /**
       * repeated string diagnostics = 9;
       */
      public Builder addDiagnosticsBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDiagnosticsIsMutable();
        diagnostics_.add(value);
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.TaskReportProto)
    }

    static {
      defaultInstance = new TaskReportProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.TaskReportProto)
  }
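  // Editor's note (illustrative commentary, not part of the generated output):
  // a minimal sketch of building a TaskReportProto through its generated Builder,
  // assuming the standard newBuilder()/build() factory methods; the field values
  // below are invented for demonstration only.
  //
  //   MRProtos.TaskReportProto report = MRProtos.TaskReportProto.newBuilder()
  //       .setTaskState(MRProtos.TaskStateProto.TS_RUNNING)
  //       .setProgress(0.5f)
  //       .setStartTime(System.currentTimeMillis())
  //       .addDiagnostics("speculative attempt launched")
  //       .build();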

  public interface TaskAttemptReportProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    boolean hasTaskAttemptId();
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId();
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder();

    // optional .hadoop.mapreduce.TaskAttemptStateProto task_attempt_state = 2;
    /**
     * optional .hadoop.mapreduce.TaskAttemptStateProto task_attempt_state = 2;
     */
    boolean hasTaskAttemptState();
    /**
     * optional .hadoop.mapreduce.TaskAttemptStateProto task_attempt_state = 2;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptStateProto getTaskAttemptState();

    // optional float progress = 3;
    /**
     * optional float progress = 3;
     */
    boolean hasProgress();
    /**
     * optional float progress = 3;
     */
    float getProgress();

    // optional int64 start_time = 4;
    /**
     * optional int64 start_time = 4;
     */
    boolean hasStartTime();
    /**
     * optional int64 start_time = 4;
     */
    long getStartTime();

    // optional int64 finish_time = 5;
    /**
     * optional int64 finish_time = 5;
     */
    boolean hasFinishTime();
    /**
     * optional int64 finish_time = 5;
     */
    long getFinishTime();

    // optional .hadoop.mapreduce.CountersProto counters = 6;
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 6;
     */
    boolean hasCounters();
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 6;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto getCounters();
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 6;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder getCountersOrBuilder();

    // optional string diagnostic_info = 7;
    /**
     * optional string diagnostic_info = 7;
     */
    boolean hasDiagnosticInfo();
    /**
     * optional string diagnostic_info = 7;
     */
    java.lang.String getDiagnosticInfo();
    /**
     * optional string diagnostic_info = 7;
     */
    com.google.protobuf.ByteString
        getDiagnosticInfoBytes();

    // optional string state_string = 8;
    /**
     * optional string state_string = 8;
     */
    boolean hasStateString();
    /**
     * optional string state_string = 8;
     */
    java.lang.String getStateString();
    /**
     * optional string state_string = 8;
     */
    com.google.protobuf.ByteString
        getStateStringBytes();

    // optional .hadoop.mapreduce.PhaseProto phase = 9;
    /**
     * optional .hadoop.mapreduce.PhaseProto phase = 9;
     */
    boolean hasPhase();
    /**
     * optional .hadoop.mapreduce.PhaseProto phase = 9;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.PhaseProto getPhase();

    // optional int64 shuffle_finish_time = 10;
    /**
     * optional int64 shuffle_finish_time = 10;
     */
    boolean hasShuffleFinishTime();
    /**
     * optional int64 shuffle_finish_time = 10;
     */
    long getShuffleFinishTime();

    // optional int64 sort_finish_time = 11;
    /**
     * optional int64 sort_finish_time = 11;
     */
    boolean hasSortFinishTime();
    /**
     * optional int64 sort_finish_time = 11;
     */
    long getSortFinishTime();

    // optional string node_manager_host = 12;
    /**
     * optional string node_manager_host = 12;
     */
    boolean hasNodeManagerHost();
    /**
     * optional string node_manager_host = 12;
     */
    java.lang.String getNodeManagerHost();
    /**
     * optional string node_manager_host = 12;
     */
    com.google.protobuf.ByteString
        getNodeManagerHostBytes();

    // optional int32 node_manager_port = 13;
    /**
     * optional int32 node_manager_port = 13;
     */
    boolean hasNodeManagerPort();
    /**
     * optional int32 node_manager_port = 13;
     */
    int getNodeManagerPort();

    // optional int32 node_manager_http_port = 14;
    /**
     * optional int32 node_manager_http_port = 14;
     */
    boolean hasNodeManagerHttpPort();
    /**
     * optional int32 node_manager_http_port = 14;
     */
    int getNodeManagerHttpPort();

    // optional .hadoop.yarn.ContainerIdProto container_id = 15;
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 15;
     */
    boolean hasContainerId();
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 15;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 15;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.TaskAttemptReportProto}
   */
  public static final class TaskAttemptReportProto extends
      com.google.protobuf.GeneratedMessage
      implements TaskAttemptReportProtoOrBuilder {
    // Use TaskAttemptReportProto.newBuilder() to construct.
    private TaskAttemptReportProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private TaskAttemptReportProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final TaskAttemptReportProto defaultInstance;
    public static TaskAttemptReportProto getDefaultInstance() {
      return defaultInstance;
    }

    public TaskAttemptReportProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private TaskAttemptReportProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = taskAttemptId_.toBuilder();
              }
              taskAttemptId_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(taskAttemptId_);
                taskAttemptId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              int rawValue = input.readEnum();
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptStateProto value = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptStateProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(2, rawValue);
              } else {
                bitField0_ |= 0x00000002;
                taskAttemptState_ = value;
              }
              break;
            }
            case 29: {
              bitField0_ |= 0x00000004;
              progress_ = input.readFloat();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              startTime_ = input.readInt64();
              break;
            }
            case 40: {
              bitField0_ |= 0x00000010;
              finishTime_ = input.readInt64();
              break;
            }
            case 50: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000020) == 0x00000020)) {
                subBuilder = counters_.toBuilder();
              }
              counters_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(counters_);
                counters_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000020;
              break;
            }
            case 58: {
              bitField0_ |= 0x00000040;
              diagnosticInfo_ = input.readBytes();
              break;
            }
            case 66: {
              bitField0_ |= 0x00000080;
              stateString_ = input.readBytes();
              break;
            }
            case 72: {
              int rawValue = input.readEnum();
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.PhaseProto value = org.apache.hadoop.mapreduce.v2.proto.MRProtos.PhaseProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(9, rawValue);
              } else {
                bitField0_ |= 0x00000100;
                phase_ = value;
              }
              break;
            }
            case 80: {
              bitField0_ |= 0x00000200;
              shuffleFinishTime_ = input.readInt64();
              break;
            }
            case 88: {
              bitField0_ |= 0x00000400;
              sortFinishTime_ = input.readInt64();
              break;
            }
            case 98: {
              bitField0_ |= 0x00000800;
              nodeManagerHost_ = input.readBytes();
              break;
            }
            case 104: {
              bitField0_ |= 0x00001000;
              nodeManagerPort_ = input.readInt32();
              break;
            }
            case 112: {
              bitField0_ |= 0x00002000;
              nodeManagerHttpPort_ = input.readInt32();
              break;
            }
            case 122: {
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00004000) == 0x00004000)) {
                subBuilder = containerId_.toBuilder();
              }
              containerId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(containerId_);
                containerId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00004000;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
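    // Editor's note (illustrative commentary, not part of the generated output):
    // each case label in the constructor above is a raw protobuf wire tag,
    // computed as (field_number << 3) | wire_type. For example, task_attempt_id
    // (field 1, length-delimited type 2) yields (1 << 3) | 2 = 10, progress
    // (field 3, 32-bit type 5) yields (3 << 3) | 5 = 29, and container_id
    // (field 15, length-delimited type 2) yields (15 << 3) | 2 = 122.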
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskAttemptReportProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskAttemptReportProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder.class);
    }

    public static com.google.protobuf.Parser<TaskAttemptReportProto> PARSER =
        new com.google.protobuf.AbstractParser<TaskAttemptReportProto>() {
      public TaskAttemptReportProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new TaskAttemptReportProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<TaskAttemptReportProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
    public static final int TASK_ATTEMPT_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_;
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    public boolean hasTaskAttemptId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
      return taskAttemptId_;
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
      return taskAttemptId_;
    }

    // optional .hadoop.mapreduce.TaskAttemptStateProto task_attempt_state = 2;
    public static final int TASK_ATTEMPT_STATE_FIELD_NUMBER = 2;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptStateProto taskAttemptState_;
    /**
     * optional .hadoop.mapreduce.TaskAttemptStateProto task_attempt_state = 2;
     */
    public boolean hasTaskAttemptState() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptStateProto task_attempt_state = 2;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptStateProto getTaskAttemptState() {
      return taskAttemptState_;
    }

    // optional float progress = 3;
    public static final int PROGRESS_FIELD_NUMBER = 3;
    private float progress_;
    /**
     * optional float progress = 3;
     */
    public boolean hasProgress() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * optional float progress = 3;
     */
    public float getProgress() {
      return progress_;
    }

    // optional int64 start_time = 4;
    public static final int START_TIME_FIELD_NUMBER = 4;
    private long startTime_;
    /**
     * optional int64 start_time = 4;
     */
    public boolean hasStartTime() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * optional int64 start_time = 4;
     */
    public long getStartTime() {
      return startTime_;
    }

    // optional int64 finish_time = 5;
    public static final int FINISH_TIME_FIELD_NUMBER = 5;
    private long finishTime_;
    /**
     * optional int64 finish_time = 5;
     */
    public boolean hasFinishTime() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * optional int64 finish_time = 5;
     */
    public long getFinishTime() {
      return finishTime_;
    }

    // optional .hadoop.mapreduce.CountersProto counters = 6;
    public static final int COUNTERS_FIELD_NUMBER = 6;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto counters_;
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 6;
     */
    public boolean hasCounters() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 6;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto getCounters() {
      return counters_;
    }
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 6;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder getCountersOrBuilder() {
      return counters_;
    }

    // optional string diagnostic_info = 7;
    public static final int DIAGNOSTIC_INFO_FIELD_NUMBER = 7;
    private java.lang.Object diagnosticInfo_;
    /**
     * optional string diagnostic_info = 7;
     */
    public boolean hasDiagnosticInfo() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * optional string diagnostic_info = 7;
     */
    public java.lang.String getDiagnosticInfo() {
      java.lang.Object ref = diagnosticInfo_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          diagnosticInfo_ = s;
        }
        return s;
      }
    }
    /**
     * optional string diagnostic_info = 7;
     */
    public com.google.protobuf.ByteString
        getDiagnosticInfoBytes() {
      java.lang.Object ref = diagnosticInfo_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        diagnosticInfo_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
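    // Editor's note (illustrative commentary, not part of the generated output):
    // string fields such as diagnostic_info are held as java.lang.Object so the
    // raw ByteString read off the wire can be kept until first use.
    // getDiagnosticInfo() decodes it to a String and caches the result only when
    // the bytes are valid UTF-8; getDiagnosticInfoBytes() converts and caches in
    // the opposite direction.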

    // optional string state_string = 8;
    public static final int STATE_STRING_FIELD_NUMBER = 8;
    private java.lang.Object stateString_;
    /**
     * optional string state_string = 8;
     */
    public boolean hasStateString() {
      return ((bitField0_ & 0x00000080) == 0x00000080);
    }
    /**
     * optional string state_string = 8;
     */
    public java.lang.String getStateString() {
      java.lang.Object ref = stateString_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          stateString_ = s;
        }
        return s;
      }
    }
    /**
     * optional string state_string = 8;
     */
    public com.google.protobuf.ByteString
        getStateStringBytes() {
      java.lang.Object ref = stateString_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        stateString_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional .hadoop.mapreduce.PhaseProto phase = 9;
    public static final int PHASE_FIELD_NUMBER = 9;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.PhaseProto phase_;
    /**
     * optional .hadoop.mapreduce.PhaseProto phase = 9;
     */
    public boolean hasPhase() {
      return ((bitField0_ & 0x00000100) == 0x00000100);
    }
    /**
     * optional .hadoop.mapreduce.PhaseProto phase = 9;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.PhaseProto getPhase() {
      return phase_;
    }

    // optional int64 shuffle_finish_time = 10;
    public static final int SHUFFLE_FINISH_TIME_FIELD_NUMBER = 10;
    private long shuffleFinishTime_;
    /**
     * optional int64 shuffle_finish_time = 10;
     */
    public boolean hasShuffleFinishTime() {
      return ((bitField0_ & 0x00000200) == 0x00000200);
    }
    /**
     * optional int64 shuffle_finish_time = 10;
     */
    public long getShuffleFinishTime() {
      return shuffleFinishTime_;
    }

    // optional int64 sort_finish_time = 11;
    public static final int SORT_FINISH_TIME_FIELD_NUMBER = 11;
    private long sortFinishTime_;
    /**
     * optional int64 sort_finish_time = 11;
     */
    public boolean hasSortFinishTime() {
      return ((bitField0_ & 0x00000400) == 0x00000400);
    }
    /**
     * optional int64 sort_finish_time = 11;
     */
    public long getSortFinishTime() {
      return sortFinishTime_;
    }

    // optional string node_manager_host = 12;
    public static final int NODE_MANAGER_HOST_FIELD_NUMBER = 12;
    private java.lang.Object nodeManagerHost_;
    /**
     * optional string node_manager_host = 12;
     */
    public boolean hasNodeManagerHost() {
      return ((bitField0_ & 0x00000800) == 0x00000800);
    }
    /**
     * optional string node_manager_host = 12;
     */
    public java.lang.String getNodeManagerHost() {
      java.lang.Object ref = nodeManagerHost_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          nodeManagerHost_ = s;
        }
        return s;
      }
    }
    /**
     * optional string node_manager_host = 12;
     */
    public com.google.protobuf.ByteString
        getNodeManagerHostBytes() {
      java.lang.Object ref = nodeManagerHost_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        nodeManagerHost_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional int32 node_manager_port = 13;
    public static final int NODE_MANAGER_PORT_FIELD_NUMBER = 13;
    private int nodeManagerPort_;
    /**
     * optional int32 node_manager_port = 13;
     */
    public boolean hasNodeManagerPort() {
      return ((bitField0_ & 0x00001000) == 0x00001000);
    }
    /**
     * optional int32 node_manager_port = 13;
     */
    public int getNodeManagerPort() {
      return nodeManagerPort_;
    }

    // optional int32 node_manager_http_port = 14;
    public static final int NODE_MANAGER_HTTP_PORT_FIELD_NUMBER = 14;
    private int nodeManagerHttpPort_;
    /**
     * optional int32 node_manager_http_port = 14;
     */
    public boolean hasNodeManagerHttpPort() {
      return ((bitField0_ & 0x00002000) == 0x00002000);
    }
    /**
     * optional int32 node_manager_http_port = 14;
     */
    public int getNodeManagerHttpPort() {
      return nodeManagerHttpPort_;
    }

    // optional .hadoop.yarn.ContainerIdProto container_id = 15;
    public static final int CONTAINER_ID_FIELD_NUMBER = 15;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 15;
     */
    public boolean hasContainerId() {
      return ((bitField0_ & 0x00004000) == 0x00004000);
    }
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 15;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
      return containerId_;
    }
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 15;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
      return containerId_;
    }
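    // Editor's note (illustrative commentary, not part of the generated output):
    // presence of each optional field is tracked in bitField0_, one bit per field
    // in declaration order; for instance hasContainerId() tests 0x00004000 (bit 14)
    // for container_id, the fifteenth declared field. Callers should check hasX()
    // before relying on getX(), since the getters fall back to the defaults
    // assigned in initFields() when a field was never set.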

    private void initFields() {
      taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
      taskAttemptState_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptStateProto.TA_NEW;
      progress_ = 0F;
      startTime_ = 0L;
      finishTime_ = 0L;
      counters_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance();
      diagnosticInfo_ = "";
      stateString_ = "";
      phase_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.PhaseProto.P_STARTING;
      shuffleFinishTime_ = 0L;
      sortFinishTime_ = 0L;
      nodeManagerHost_ = "";
      nodeManagerPort_ = 0;
      nodeManagerHttpPort_ = 0;
      containerId_ = org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, taskAttemptId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeEnum(2, taskAttemptState_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeFloat(3, progress_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeInt64(4, startTime_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeInt64(5, finishTime_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeMessage(6, counters_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeBytes(7, getDiagnosticInfoBytes());
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        output.writeBytes(8, getStateStringBytes());
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        output.writeEnum(9, phase_.getNumber());
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        output.writeInt64(10, shuffleFinishTime_);
      }
      if (((bitField0_ & 0x00000400) == 0x00000400)) {
        output.writeInt64(11, sortFinishTime_);
      }
      if (((bitField0_ & 0x00000800) == 0x00000800)) {
        output.writeBytes(12, getNodeManagerHostBytes());
      }
      if (((bitField0_ & 0x00001000) == 0x00001000)) {
        output.writeInt32(13, nodeManagerPort_);
      }
      if (((bitField0_ & 0x00002000) == 0x00002000)) {
        output.writeInt32(14, nodeManagerHttpPort_);
      }
      if (((bitField0_ & 0x00004000) == 0x00004000)) {
        output.writeMessage(15, containerId_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, taskAttemptId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(2, taskAttemptState_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeFloatSize(3, progress_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(4, startTime_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(5, finishTime_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(6, counters_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(7, getDiagnosticInfoBytes());
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(8, getStateStringBytes());
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(9, phase_.getNumber());
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(10, shuffleFinishTime_);
      }
      if (((bitField0_ & 0x00000400) == 0x00000400)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(11, sortFinishTime_);
      }
      if (((bitField0_ & 0x00000800) == 0x00000800)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(12, getNodeManagerHostBytes());
      }
      if (((bitField0_ & 0x00001000) == 0x00001000)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(13, nodeManagerPort_);
      }
      if (((bitField0_ & 0x00002000) == 0x00002000)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(14, nodeManagerHttpPort_);
      }
      if (((bitField0_ & 0x00004000) == 0x00004000)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(15, containerId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto other = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto) obj;

      boolean result = true;
      result = result && (hasTaskAttemptId() == other.hasTaskAttemptId());
      if (hasTaskAttemptId()) {
        result = result && getTaskAttemptId()
            .equals(other.getTaskAttemptId());
      }
      result = result && (hasTaskAttemptState() == other.hasTaskAttemptState());
      if (hasTaskAttemptState()) {
        result = result &&
            (getTaskAttemptState() == other.getTaskAttemptState());
      }
      result = result && (hasProgress() == other.hasProgress());
      if (hasProgress()) {
        result = result && (Float.floatToIntBits(getProgress())    == Float.floatToIntBits(other.getProgress()));
      }
      result = result && (hasStartTime() == other.hasStartTime());
      if (hasStartTime()) {
        result = result && (getStartTime()
            == other.getStartTime());
      }
      result = result && (hasFinishTime() == other.hasFinishTime());
      if (hasFinishTime()) {
        result = result && (getFinishTime()
            == other.getFinishTime());
      }
      result = result && (hasCounters() == other.hasCounters());
      if (hasCounters()) {
        result = result && getCounters()
            .equals(other.getCounters());
      }
      result = result && (hasDiagnosticInfo() == other.hasDiagnosticInfo());
      if (hasDiagnosticInfo()) {
        result = result && getDiagnosticInfo()
            .equals(other.getDiagnosticInfo());
      }
      result = result && (hasStateString() == other.hasStateString());
      if (hasStateString()) {
        result = result && getStateString()
            .equals(other.getStateString());
      }
      result = result && (hasPhase() == other.hasPhase());
      if (hasPhase()) {
        result = result &&
            (getPhase() == other.getPhase());
      }
      result = result && (hasShuffleFinishTime() == other.hasShuffleFinishTime());
      if (hasShuffleFinishTime()) {
        result = result && (getShuffleFinishTime()
            == other.getShuffleFinishTime());
      }
      result = result && (hasSortFinishTime() == other.hasSortFinishTime());
      if (hasSortFinishTime()) {
        result = result && (getSortFinishTime()
            == other.getSortFinishTime());
      }
      result = result && (hasNodeManagerHost() == other.hasNodeManagerHost());
      if (hasNodeManagerHost()) {
        result = result && getNodeManagerHost()
            .equals(other.getNodeManagerHost());
      }
      result = result && (hasNodeManagerPort() == other.hasNodeManagerPort());
      if (hasNodeManagerPort()) {
        result = result && (getNodeManagerPort()
            == other.getNodeManagerPort());
      }
      result = result && (hasNodeManagerHttpPort() == other.hasNodeManagerHttpPort());
      if (hasNodeManagerHttpPort()) {
        result = result && (getNodeManagerHttpPort()
            == other.getNodeManagerHttpPort());
      }
      result = result && (hasContainerId() == other.hasContainerId());
      if (hasContainerId()) {
        result = result && getContainerId()
            .equals(other.getContainerId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTaskAttemptId()) {
        hash = (37 * hash) + TASK_ATTEMPT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getTaskAttemptId().hashCode();
      }
      if (hasTaskAttemptState()) {
        hash = (37 * hash) + TASK_ATTEMPT_STATE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getTaskAttemptState());
      }
      if (hasProgress()) {
        hash = (37 * hash) + PROGRESS_FIELD_NUMBER;
        hash = (53 * hash) + Float.floatToIntBits(
            getProgress());
      }
      if (hasStartTime()) {
        hash = (37 * hash) + START_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getStartTime());
      }
      if (hasFinishTime()) {
        hash = (37 * hash) + FINISH_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getFinishTime());
      }
      if (hasCounters()) {
        hash = (37 * hash) + COUNTERS_FIELD_NUMBER;
        hash = (53 * hash) + getCounters().hashCode();
      }
      if (hasDiagnosticInfo()) {
        hash = (37 * hash) + DIAGNOSTIC_INFO_FIELD_NUMBER;
        hash = (53 * hash) + getDiagnosticInfo().hashCode();
      }
      if (hasStateString()) {
        hash = (37 * hash) + STATE_STRING_FIELD_NUMBER;
        hash = (53 * hash) + getStateString().hashCode();
      }
      if (hasPhase()) {
        hash = (37 * hash) + PHASE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getPhase());
      }
      if (hasShuffleFinishTime()) {
        hash = (37 * hash) + SHUFFLE_FINISH_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getShuffleFinishTime());
      }
      if (hasSortFinishTime()) {
        hash = (37 * hash) + SORT_FINISH_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getSortFinishTime());
      }
      if (hasNodeManagerHost()) {
        hash = (37 * hash) + NODE_MANAGER_HOST_FIELD_NUMBER;
        hash = (53 * hash) + getNodeManagerHost().hashCode();
      }
      if (hasNodeManagerPort()) {
        hash = (37 * hash) + NODE_MANAGER_PORT_FIELD_NUMBER;
        hash = (53 * hash) + getNodeManagerPort();
      }
      if (hasNodeManagerHttpPort()) {
        hash = (37 * hash) + NODE_MANAGER_HTTP_PORT_FIELD_NUMBER;
        hash = (53 * hash) + getNodeManagerHttpPort();
      }
      if (hasContainerId()) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
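
    // Editorial note (not generated code): equals() above compares every set
    // field by value and hashCode() memoizes its result, so reports built from
    // identical inputs can safely serve as keys in a HashMap or members of a
    // HashSet. A minimal sketch, assuming this class is on the classpath:
    //
    //   TaskAttemptReportProto a = TaskAttemptReportProto.newBuilder()
    //       .setProgress(1.0f).setStartTime(100L).build();
    //   TaskAttemptReportProto b = TaskAttemptReportProto.newBuilder()
    //       .setProgress(1.0f).setStartTime(100L).build();
    //   assert a.equals(b) && a.hashCode() == b.hashCode();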

    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
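
    // Usage sketch (editorial, not part of the generated file): the parseFrom
    // overloads above are the entry points for decoding a serialized
    // TaskAttemptReportProto. Assuming a previously built instance 'report':
    //
    //   byte[] wire = report.toByteArray();
    //   TaskAttemptReportProto decoded = TaskAttemptReportProto.parseFrom(wire);
    //   // parseDelimitedFrom(...) pairs with report.writeDelimitedTo(out) when
    //   // several length-prefixed messages share a single stream.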

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
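
    // Editorial sketch: toBuilder() and newBuilder(prototype) return a builder
    // pre-populated from an existing message, which is the usual way to derive
    // a modified copy of an immutable proto. Assuming an existing 'report':
    //
    //   TaskAttemptReportProto updated = report.toBuilder()
    //       .setProgress(0.75f)
    //       .setStateString("still running")
    //       .build();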

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.TaskAttemptReportProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskAttemptReportProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskAttemptReportProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTaskAttemptIdFieldBuilder();
          getCountersFieldBuilder();
          getContainerIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
        } else {
          taskAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        taskAttemptState_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptStateProto.TA_NEW;
        bitField0_ = (bitField0_ & ~0x00000002);
        progress_ = 0F;
        bitField0_ = (bitField0_ & ~0x00000004);
        startTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        finishTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000010);
        if (countersBuilder_ == null) {
          counters_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance();
        } else {
          countersBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        diagnosticInfo_ = "";
        bitField0_ = (bitField0_ & ~0x00000040);
        stateString_ = "";
        bitField0_ = (bitField0_ & ~0x00000080);
        phase_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.PhaseProto.P_STARTING;
        bitField0_ = (bitField0_ & ~0x00000100);
        shuffleFinishTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000200);
        sortFinishTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000400);
        nodeManagerHost_ = "";
        bitField0_ = (bitField0_ & ~0x00000800);
        nodeManagerPort_ = 0;
        bitField0_ = (bitField0_ & ~0x00001000);
        nodeManagerHttpPort_ = 0;
        bitField0_ = (bitField0_ & ~0x00002000);
        if (containerIdBuilder_ == null) {
          containerId_ = org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance();
        } else {
          containerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00004000);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskAttemptReportProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (taskAttemptIdBuilder_ == null) {
          result.taskAttemptId_ = taskAttemptId_;
        } else {
          result.taskAttemptId_ = taskAttemptIdBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.taskAttemptState_ = taskAttemptState_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.progress_ = progress_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.startTime_ = startTime_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.finishTime_ = finishTime_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        if (countersBuilder_ == null) {
          result.counters_ = counters_;
        } else {
          result.counters_ = countersBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000040;
        }
        result.diagnosticInfo_ = diagnosticInfo_;
        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
          to_bitField0_ |= 0x00000080;
        }
        result.stateString_ = stateString_;
        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
          to_bitField0_ |= 0x00000100;
        }
        result.phase_ = phase_;
        if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
          to_bitField0_ |= 0x00000200;
        }
        result.shuffleFinishTime_ = shuffleFinishTime_;
        if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
          to_bitField0_ |= 0x00000400;
        }
        result.sortFinishTime_ = sortFinishTime_;
        if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
          to_bitField0_ |= 0x00000800;
        }
        result.nodeManagerHost_ = nodeManagerHost_;
        if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
          to_bitField0_ |= 0x00001000;
        }
        result.nodeManagerPort_ = nodeManagerPort_;
        if (((from_bitField0_ & 0x00002000) == 0x00002000)) {
          to_bitField0_ |= 0x00002000;
        }
        result.nodeManagerHttpPort_ = nodeManagerHttpPort_;
        if (((from_bitField0_ & 0x00004000) == 0x00004000)) {
          to_bitField0_ |= 0x00004000;
        }
        if (containerIdBuilder_ == null) {
          result.containerId_ = containerId_;
        } else {
          result.containerId_ = containerIdBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance()) return this;
        if (other.hasTaskAttemptId()) {
          mergeTaskAttemptId(other.getTaskAttemptId());
        }
        if (other.hasTaskAttemptState()) {
          setTaskAttemptState(other.getTaskAttemptState());
        }
        if (other.hasProgress()) {
          setProgress(other.getProgress());
        }
        if (other.hasStartTime()) {
          setStartTime(other.getStartTime());
        }
        if (other.hasFinishTime()) {
          setFinishTime(other.getFinishTime());
        }
        if (other.hasCounters()) {
          mergeCounters(other.getCounters());
        }
        if (other.hasDiagnosticInfo()) {
          bitField0_ |= 0x00000040;
          diagnosticInfo_ = other.diagnosticInfo_;
          onChanged();
        }
        if (other.hasStateString()) {
          bitField0_ |= 0x00000080;
          stateString_ = other.stateString_;
          onChanged();
        }
        if (other.hasPhase()) {
          setPhase(other.getPhase());
        }
        if (other.hasShuffleFinishTime()) {
          setShuffleFinishTime(other.getShuffleFinishTime());
        }
        if (other.hasSortFinishTime()) {
          setSortFinishTime(other.getSortFinishTime());
        }
        if (other.hasNodeManagerHost()) {
          bitField0_ |= 0x00000800;
          nodeManagerHost_ = other.nodeManagerHost_;
          onChanged();
        }
        if (other.hasNodeManagerPort()) {
          setNodeManagerPort(other.getNodeManagerPort());
        }
        if (other.hasNodeManagerHttpPort()) {
          setNodeManagerHttpPort(other.getNodeManagerHttpPort());
        }
        if (other.hasContainerId()) {
          mergeContainerId(other.getContainerId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
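
      // Editorial note: mergeFrom(other) copies only the fields that are set on
      // 'other', so a sparse update can be overlaid on an existing builder
      // without disturbing fields the update does not mention, e.g.:
      //
      //   builder.mergeFrom(
      //       TaskAttemptReportProto.newBuilder().setProgress(0.5f).buildPartial());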

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> taskAttemptIdBuilder_;
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public boolean hasTaskAttemptId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
        if (taskAttemptIdBuilder_ == null) {
          return taskAttemptId_;
        } else {
          return taskAttemptIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder setTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
        if (taskAttemptIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          taskAttemptId_ = value;
          onChanged();
        } else {
          taskAttemptIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder setTaskAttemptId(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder builderForValue) {
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptId_ = builderForValue.build();
          onChanged();
        } else {
          taskAttemptIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder mergeTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
        if (taskAttemptIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              taskAttemptId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance()) {
            taskAttemptId_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.newBuilder(taskAttemptId_).mergeFrom(value).buildPartial();
          } else {
            taskAttemptId_ = value;
          }
          onChanged();
        } else {
          taskAttemptIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder clearTaskAttemptId() {
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
          onChanged();
        } else {
          taskAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder getTaskAttemptIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getTaskAttemptIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
        if (taskAttemptIdBuilder_ != null) {
          return taskAttemptIdBuilder_.getMessageOrBuilder();
        } else {
          return taskAttemptId_;
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> 
          getTaskAttemptIdFieldBuilder() {
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder>(
                  taskAttemptId_,
                  getParentForChildren(),
                  isClean());
          taskAttemptId_ = null;
        }
        return taskAttemptIdBuilder_;
      }
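
      // Editorial sketch: message-typed fields such as task_attempt_id can be
      // supplied as an already built value via setTaskAttemptId(...), or edited
      // in place through the nested builder (the field name 'id' below is an
      // assumption about TaskAttemptIdProto):
      //
      //   TaskAttemptReportProto.Builder b = TaskAttemptReportProto.newBuilder();
      //   b.getTaskAttemptIdBuilder().setId(3);
      //   TaskAttemptReportProto report = b.build();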

      // optional .hadoop.mapreduce.TaskAttemptStateProto task_attempt_state = 2;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptStateProto taskAttemptState_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptStateProto.TA_NEW;
      /**
       * optional .hadoop.mapreduce.TaskAttemptStateProto task_attempt_state = 2;
       */
      public boolean hasTaskAttemptState() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptStateProto task_attempt_state = 2;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptStateProto getTaskAttemptState() {
        return taskAttemptState_;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptStateProto task_attempt_state = 2;
       */
      public Builder setTaskAttemptState(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        taskAttemptState_ = value;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptStateProto task_attempt_state = 2;
       */
      public Builder clearTaskAttemptState() {
        bitField0_ = (bitField0_ & ~0x00000002);
        taskAttemptState_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptStateProto.TA_NEW;
        onChanged();
        return this;
      }

      // optional float progress = 3;
      private float progress_ ;
      /**
       * optional float progress = 3;
       */
      public boolean hasProgress() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * optional float progress = 3;
       */
      public float getProgress() {
        return progress_;
      }
      /**
       * optional float progress = 3;
       */
      public Builder setProgress(float value) {
        bitField0_ |= 0x00000004;
        progress_ = value;
        onChanged();
        return this;
      }
      /**
       * optional float progress = 3;
       */
      public Builder clearProgress() {
        bitField0_ = (bitField0_ & ~0x00000004);
        progress_ = 0F;
        onChanged();
        return this;
      }

      // optional int64 start_time = 4;
      private long startTime_ ;
      /**
       * optional int64 start_time = 4;
       */
      public boolean hasStartTime() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * optional int64 start_time = 4;
       */
      public long getStartTime() {
        return startTime_;
      }
      /**
       * optional int64 start_time = 4;
       */
      public Builder setStartTime(long value) {
        bitField0_ |= 0x00000008;
        startTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 start_time = 4;
       */
      public Builder clearStartTime() {
        bitField0_ = (bitField0_ & ~0x00000008);
        startTime_ = 0L;
        onChanged();
        return this;
      }

      // optional int64 finish_time = 5;
      private long finishTime_ ;
      /**
       * optional int64 finish_time = 5;
       */
      public boolean hasFinishTime() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * optional int64 finish_time = 5;
       */
      public long getFinishTime() {
        return finishTime_;
      }
      /**
       * optional int64 finish_time = 5;
       */
      public Builder setFinishTime(long value) {
        bitField0_ |= 0x00000010;
        finishTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 finish_time = 5;
       */
      public Builder clearFinishTime() {
        bitField0_ = (bitField0_ & ~0x00000010);
        finishTime_ = 0L;
        onChanged();
        return this;
      }

      // optional .hadoop.mapreduce.CountersProto counters = 6;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto counters_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder> countersBuilder_;
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      public boolean hasCounters() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto getCounters() {
        if (countersBuilder_ == null) {
          return counters_;
        } else {
          return countersBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      public Builder setCounters(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto value) {
        if (countersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          counters_ = value;
          onChanged();
        } else {
          countersBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      public Builder setCounters(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder builderForValue) {
        if (countersBuilder_ == null) {
          counters_ = builderForValue.build();
          onChanged();
        } else {
          countersBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      public Builder mergeCounters(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto value) {
        if (countersBuilder_ == null) {
          if (((bitField0_ & 0x00000020) == 0x00000020) &&
              counters_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance()) {
            counters_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.newBuilder(counters_).mergeFrom(value).buildPartial();
          } else {
            counters_ = value;
          }
          onChanged();
        } else {
          countersBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      public Builder clearCounters() {
        if (countersBuilder_ == null) {
          counters_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance();
          onChanged();
        } else {
          countersBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder getCountersBuilder() {
        bitField0_ |= 0x00000020;
        onChanged();
        return getCountersFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder getCountersOrBuilder() {
        if (countersBuilder_ != null) {
          return countersBuilder_.getMessageOrBuilder();
        } else {
          return counters_;
        }
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 6;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder> 
          getCountersFieldBuilder() {
        if (countersBuilder_ == null) {
          countersBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder>(
                  counters_,
                  getParentForChildren(),
                  isClean());
          counters_ = null;
        }
        return countersBuilder_;
      }

      // optional string diagnostic_info = 7;
      private java.lang.Object diagnosticInfo_ = "";
      /**
       * optional string diagnostic_info = 7;
       */
      public boolean hasDiagnosticInfo() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      /**
       * optional string diagnostic_info = 7;
       */
      public java.lang.String getDiagnosticInfo() {
        java.lang.Object ref = diagnosticInfo_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          diagnosticInfo_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string diagnostic_info = 7;
       */
      public com.google.protobuf.ByteString
          getDiagnosticInfoBytes() {
        java.lang.Object ref = diagnosticInfo_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          diagnosticInfo_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string diagnostic_info = 7;
       */
      public Builder setDiagnosticInfo(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000040;
        diagnosticInfo_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string diagnostic_info = 7;
       */
      public Builder clearDiagnosticInfo() {
        bitField0_ = (bitField0_ & ~0x00000040);
        diagnosticInfo_ = getDefaultInstance().getDiagnosticInfo();
        onChanged();
        return this;
      }
      /**
       * optional string diagnostic_info = 7;
       */
      public Builder setDiagnosticInfoBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000040;
        diagnosticInfo_ = value;
        onChanged();
        return this;
      }
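
      // Editorial note: string fields are stored as either a java.lang.String
      // or a UTF-8 ByteString; the String accessor converts and caches on first
      // use, while the *Bytes accessor exposes the raw UTF-8 view, e.g.:
      //
      //   String text = builder.getDiagnosticInfo();
      //   com.google.protobuf.ByteString raw = builder.getDiagnosticInfoBytes();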

      // optional string state_string = 8;
      private java.lang.Object stateString_ = "";
      /**
       * optional string state_string = 8;
       */
      public boolean hasStateString() {
        return ((bitField0_ & 0x00000080) == 0x00000080);
      }
      /**
       * optional string state_string = 8;
       */
      public java.lang.String getStateString() {
        java.lang.Object ref = stateString_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          stateString_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string state_string = 8;
       */
      public com.google.protobuf.ByteString
          getStateStringBytes() {
        java.lang.Object ref = stateString_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          stateString_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string state_string = 8;
       */
      public Builder setStateString(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000080;
        stateString_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string state_string = 8;
       */
      public Builder clearStateString() {
        bitField0_ = (bitField0_ & ~0x00000080);
        stateString_ = getDefaultInstance().getStateString();
        onChanged();
        return this;
      }
      /**
       * optional string state_string = 8;
       */
      public Builder setStateStringBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000080;
        stateString_ = value;
        onChanged();
        return this;
      }

      // optional .hadoop.mapreduce.PhaseProto phase = 9;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.PhaseProto phase_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.PhaseProto.P_STARTING;
      /**
       * optional .hadoop.mapreduce.PhaseProto phase = 9;
       */
      public boolean hasPhase() {
        return ((bitField0_ & 0x00000100) == 0x00000100);
      }
      /**
       * optional .hadoop.mapreduce.PhaseProto phase = 9;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.PhaseProto getPhase() {
        return phase_;
      }
      /**
       * optional .hadoop.mapreduce.PhaseProto phase = 9;
       */
      public Builder setPhase(org.apache.hadoop.mapreduce.v2.proto.MRProtos.PhaseProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000100;
        phase_ = value;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.mapreduce.PhaseProto phase = 9;
       */
      public Builder clearPhase() {
        bitField0_ = (bitField0_ & ~0x00000100);
        phase_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.PhaseProto.P_STARTING;
        onChanged();
        return this;
      }

      // optional int64 shuffle_finish_time = 10;
      private long shuffleFinishTime_ ;
      /**
       * optional int64 shuffle_finish_time = 10;
       */
      public boolean hasShuffleFinishTime() {
        return ((bitField0_ & 0x00000200) == 0x00000200);
      }
      /**
       * optional int64 shuffle_finish_time = 10;
       */
      public long getShuffleFinishTime() {
        return shuffleFinishTime_;
      }
      /**
       * optional int64 shuffle_finish_time = 10;
       */
      public Builder setShuffleFinishTime(long value) {
        bitField0_ |= 0x00000200;
        shuffleFinishTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 shuffle_finish_time = 10;
       */
      public Builder clearShuffleFinishTime() {
        bitField0_ = (bitField0_ & ~0x00000200);
        shuffleFinishTime_ = 0L;
        onChanged();
        return this;
      }

      // optional int64 sort_finish_time = 11;
      private long sortFinishTime_ ;
      /**
       * optional int64 sort_finish_time = 11;
       */
      public boolean hasSortFinishTime() {
        return ((bitField0_ & 0x00000400) == 0x00000400);
      }
      /**
       * optional int64 sort_finish_time = 11;
       */
      public long getSortFinishTime() {
        return sortFinishTime_;
      }
      /**
       * optional int64 sort_finish_time = 11;
       */
      public Builder setSortFinishTime(long value) {
        bitField0_ |= 0x00000400;
        sortFinishTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 sort_finish_time = 11;
       */
      public Builder clearSortFinishTime() {
        bitField0_ = (bitField0_ & ~0x00000400);
        sortFinishTime_ = 0L;
        onChanged();
        return this;
      }

      // optional string node_manager_host = 12;
      private java.lang.Object nodeManagerHost_ = "";
      /**
       * optional string node_manager_host = 12;
       */
      public boolean hasNodeManagerHost() {
        return ((bitField0_ & 0x00000800) == 0x00000800);
      }
      /**
       * optional string node_manager_host = 12;
       */
      public java.lang.String getNodeManagerHost() {
        java.lang.Object ref = nodeManagerHost_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          nodeManagerHost_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string node_manager_host = 12;
       */
      public com.google.protobuf.ByteString
          getNodeManagerHostBytes() {
        java.lang.Object ref = nodeManagerHost_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          nodeManagerHost_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string node_manager_host = 12;
       */
      public Builder setNodeManagerHost(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000800;
        nodeManagerHost_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string node_manager_host = 12;
       */
      public Builder clearNodeManagerHost() {
        bitField0_ = (bitField0_ & ~0x00000800);
        nodeManagerHost_ = getDefaultInstance().getNodeManagerHost();
        onChanged();
        return this;
      }
      /**
       * optional string node_manager_host = 12;
       */
      public Builder setNodeManagerHostBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000800;
        nodeManagerHost_ = value;
        onChanged();
        return this;
      }

      // optional int32 node_manager_port = 13;
      private int nodeManagerPort_ ;
      /**
       * optional int32 node_manager_port = 13;
       */
      public boolean hasNodeManagerPort() {
        return ((bitField0_ & 0x00001000) == 0x00001000);
      }
      /**
       * optional int32 node_manager_port = 13;
       */
      public int getNodeManagerPort() {
        return nodeManagerPort_;
      }
      /**
       * optional int32 node_manager_port = 13;
       */
      public Builder setNodeManagerPort(int value) {
        bitField0_ |= 0x00001000;
        nodeManagerPort_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 node_manager_port = 13;
       */
      public Builder clearNodeManagerPort() {
        bitField0_ = (bitField0_ & ~0x00001000);
        nodeManagerPort_ = 0;
        onChanged();
        return this;
      }

      // optional int32 node_manager_http_port = 14;
      private int nodeManagerHttpPort_ ;
      /**
       * optional int32 node_manager_http_port = 14;
       */
      public boolean hasNodeManagerHttpPort() {
        return ((bitField0_ & 0x00002000) == 0x00002000);
      }
      /**
       * optional int32 node_manager_http_port = 14;
       */
      public int getNodeManagerHttpPort() {
        return nodeManagerHttpPort_;
      }
      /**
       * optional int32 node_manager_http_port = 14;
       */
      public Builder setNodeManagerHttpPort(int value) {
        bitField0_ |= 0x00002000;
        nodeManagerHttpPort_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 node_manager_http_port = 14;
       */
      public Builder clearNodeManagerHttpPort() {
        bitField0_ = (bitField0_ & ~0x00002000);
        nodeManagerHttpPort_ = 0;
        onChanged();
        return this;
      }

      // optional .hadoop.yarn.ContainerIdProto container_id = 15;
      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_ = org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 15;
       */
      public boolean hasContainerId() {
        return ((bitField0_ & 0x00004000) == 0x00004000);
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 15;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
        if (containerIdBuilder_ == null) {
          return containerId_;
        } else {
          return containerIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 15;
       */
      public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerId_ = value;
          onChanged();
        } else {
          containerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00004000;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 15;
       */
      public Builder setContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          containerId_ = builderForValue.build();
          onChanged();
        } else {
          containerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00004000;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 15;
       */
      public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (((bitField0_ & 0x00004000) == 0x00004000) &&
              containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            containerId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.newBuilder(containerId_).mergeFrom(value).buildPartial();
          } else {
            containerId_ = value;
          }
          onChanged();
        } else {
          containerIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00004000;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 15;
       */
      public Builder clearContainerId() {
        if (containerIdBuilder_ == null) {
          containerId_ = org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance();
          onChanged();
        } else {
          containerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00004000);
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 15;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
        bitField0_ |= 0x00004000;
        onChanged();
        return getContainerIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 15;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilder();
        } else {
          return containerId_;
        }
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 15;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getContainerIdFieldBuilder() {
        if (containerIdBuilder_ == null) {
          containerIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  containerId_,
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.TaskAttemptReportProto)
    }

    static {
      defaultInstance = new TaskAttemptReportProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.TaskAttemptReportProto)
  }
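
  // Editorial sketch (not generated): putting the pieces above together, a
  // TaskAttemptReportProto is assembled through its Builder and then treated
  // as an immutable value. Only constants shown in this file are used here:
  //
  //   TaskAttemptReportProto report = TaskAttemptReportProto.newBuilder()
  //       .setTaskAttemptState(TaskAttemptStateProto.TA_NEW)
  //       .setPhase(PhaseProto.P_STARTING)
  //       .setProgress(0.0f)
  //       .setNodeManagerHost("nm-host.example.com")
  //       .setNodeManagerPort(45454)
  //       .build();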

  public interface JobReportProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.JobIdProto job_id = 1;
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    boolean hasJobId();
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId();
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder();

    // optional .hadoop.mapreduce.JobStateProto job_state = 2;
    /**
     * optional .hadoop.mapreduce.JobStateProto job_state = 2;
     */
    boolean hasJobState();
    /**
     * optional .hadoop.mapreduce.JobStateProto job_state = 2;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobStateProto getJobState();

    // optional float map_progress = 3;
    /**
     * optional float map_progress = 3;
     */
    boolean hasMapProgress();
    /**
     * optional float map_progress = 3;
     */
    float getMapProgress();

    // optional float reduce_progress = 4;
    /**
     * optional float reduce_progress = 4;
     */
    boolean hasReduceProgress();
    /**
     * optional float reduce_progress = 4;
     */
    float getReduceProgress();

    // optional float cleanup_progress = 5;
    /**
     * optional float cleanup_progress = 5;
     */
    boolean hasCleanupProgress();
    /**
     * optional float cleanup_progress = 5;
     */
    float getCleanupProgress();

    // optional float setup_progress = 6;
    /**
     * optional float setup_progress = 6;
     */
    boolean hasSetupProgress();
    /**
     * optional float setup_progress = 6;
     */
    float getSetupProgress();

    // optional int64 start_time = 7;
    /**
     * optional int64 start_time = 7;
     */
    boolean hasStartTime();
    /**
     * optional int64 start_time = 7;
     */
    long getStartTime();

    // optional int64 finish_time = 8;
    /**
     * optional int64 finish_time = 8;
     */
    boolean hasFinishTime();
    /**
     * optional int64 finish_time = 8;
     */
    long getFinishTime();

    // optional string user = 9;
    /**
     * optional string user = 9;
     */
    boolean hasUser();
    /**
     * optional string user = 9;
     */
    java.lang.String getUser();
    /**
     * optional string user = 9;
     */
    com.google.protobuf.ByteString
        getUserBytes();

    // optional string jobName = 10;
    /**
     * optional string jobName = 10;
     */
    boolean hasJobName();
    /**
     * optional string jobName = 10;
     */
    java.lang.String getJobName();
    /**
     * optional string jobName = 10;
     */
    com.google.protobuf.ByteString
        getJobNameBytes();

    // optional string trackingUrl = 11;
    /**
     * optional string trackingUrl = 11;
     */
    boolean hasTrackingUrl();
    /**
     * optional string trackingUrl = 11;
     */
    java.lang.String getTrackingUrl();
    /**
     * optional string trackingUrl = 11;
     */
    com.google.protobuf.ByteString
        getTrackingUrlBytes();

    // optional string diagnostics = 12;
    /**
     * optional string diagnostics = 12;
     */
    boolean hasDiagnostics();
    /**
     * optional string diagnostics = 12;
     */
    java.lang.String getDiagnostics();
    /**
     * optional string diagnostics = 12;
     */
    com.google.protobuf.ByteString
        getDiagnosticsBytes();

    // optional string jobFile = 13;
    /**
     * optional string jobFile = 13;
     */
    boolean hasJobFile();
    /**
     * optional string jobFile = 13;
     */
    java.lang.String getJobFile();
    /**
     * optional string jobFile = 13;
     */
    com.google.protobuf.ByteString
        getJobFileBytes();

    // repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
    /**
     * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
     */
    java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto>
        getAmInfosList();
    /**
     * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto getAmInfos(int index);
    /**
     * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
     */
    int getAmInfosCount();
    /**
     * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
     */
    java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProtoOrBuilder>
        getAmInfosOrBuilderList();
    /**
     * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProtoOrBuilder getAmInfosOrBuilder(
        int index);

    // optional int64 submit_time = 15;
    /**
     * optional int64 submit_time = 15;
     */
    boolean hasSubmitTime();
    /**
     * optional int64 submit_time = 15;
     */
    long getSubmitTime();

    // optional bool is_uber = 16 [default = false];
    /**
     * optional bool is_uber = 16 [default = false];
     */
    boolean hasIsUber();
    /**
     * optional bool is_uber = 16 [default = false];
     */
    boolean getIsUber();
  }
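
  // Editorial note: JobReportProtoOrBuilder is implemented both by the
  // immutable JobReportProto below and by its Builder, so read-only helpers
  // can accept either form. An illustrative sketch:
  //
  //   static float mapReduceProgress(JobReportProtoOrBuilder r) {
  //     return (r.getMapProgress() + r.getReduceProgress()) / 2.0f;
  //   }
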
  /**
   * Protobuf type {@code hadoop.mapreduce.JobReportProto}
   */
  public static final class JobReportProto extends
      com.google.protobuf.GeneratedMessage
      implements JobReportProtoOrBuilder {
    // Use JobReportProto.newBuilder() to construct.
    private JobReportProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private JobReportProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final JobReportProto defaultInstance;
    public static JobReportProto getDefaultInstance() {
      return defaultInstance;
    }

    public JobReportProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private JobReportProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = jobId_.toBuilder();
              }
              jobId_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(jobId_);
                jobId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              int rawValue = input.readEnum();
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobStateProto value = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobStateProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(2, rawValue);
              } else {
                bitField0_ |= 0x00000002;
                jobState_ = value;
              }
              break;
            }
            case 29: {
              bitField0_ |= 0x00000004;
              mapProgress_ = input.readFloat();
              break;
            }
            case 37: {
              bitField0_ |= 0x00000008;
              reduceProgress_ = input.readFloat();
              break;
            }
            case 45: {
              bitField0_ |= 0x00000010;
              cleanupProgress_ = input.readFloat();
              break;
            }
            case 53: {
              bitField0_ |= 0x00000020;
              setupProgress_ = input.readFloat();
              break;
            }
            case 56: {
              bitField0_ |= 0x00000040;
              startTime_ = input.readInt64();
              break;
            }
            case 64: {
              bitField0_ |= 0x00000080;
              finishTime_ = input.readInt64();
              break;
            }
            case 74: {
              bitField0_ |= 0x00000100;
              user_ = input.readBytes();
              break;
            }
            case 82: {
              bitField0_ |= 0x00000200;
              jobName_ = input.readBytes();
              break;
            }
            case 90: {
              bitField0_ |= 0x00000400;
              trackingUrl_ = input.readBytes();
              break;
            }
            case 98: {
              bitField0_ |= 0x00000800;
              diagnostics_ = input.readBytes();
              break;
            }
            case 106: {
              bitField0_ |= 0x00001000;
              jobFile_ = input.readBytes();
              break;
            }
            case 114: {
              if (!((mutable_bitField0_ & 0x00002000) == 0x00002000)) {
                amInfos_ = new java.util.ArrayList<org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto>();
                mutable_bitField0_ |= 0x00002000;
              }
              amInfos_.add(input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.PARSER, extensionRegistry));
              break;
            }
            case 120: {
              bitField0_ |= 0x00002000;
              submitTime_ = input.readInt64();
              break;
            }
            case 128: {
              bitField0_ |= 0x00004000;
              isUber_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00002000) == 0x00002000)) {
          amInfos_ = java.util.Collections.unmodifiableList(amInfos_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_JobReportProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_JobReportProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.Builder.class);
    }

    public static com.google.protobuf.Parser<JobReportProto> PARSER =
        new com.google.protobuf.AbstractParser<JobReportProto>() {
      public JobReportProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new JobReportProto(input, extensionRegistry);
      }
    };
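    /*
     * PARSER is the protobuf 2.x entry point for deserialization; the static
     * parseFrom overloads further down delegate to it. Illustrative usage sketch
     * ('payload' is a hypothetical byte[] holding a serialized JobReportProto):
     *
     *   JobReportProto report = JobReportProto.PARSER.parseFrom(payload);
     */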

    @java.lang.Override
    public com.google.protobuf.Parser<JobReportProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.JobIdProto job_id = 1;
    public static final int JOB_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_;
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public boolean hasJobId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
      return jobId_;
    }
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
      return jobId_;
    }

    // optional .hadoop.mapreduce.JobStateProto job_state = 2;
    public static final int JOB_STATE_FIELD_NUMBER = 2;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobStateProto jobState_;
    /**
     * optional .hadoop.mapreduce.JobStateProto job_state = 2;
     */
    public boolean hasJobState() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * optional .hadoop.mapreduce.JobStateProto job_state = 2;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobStateProto getJobState() {
      return jobState_;
    }

    // optional float map_progress = 3;
    public static final int MAP_PROGRESS_FIELD_NUMBER = 3;
    private float mapProgress_;
    /**
     * optional float map_progress = 3;
     */
    public boolean hasMapProgress() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * optional float map_progress = 3;
     */
    public float getMapProgress() {
      return mapProgress_;
    }

    // optional float reduce_progress = 4;
    public static final int REDUCE_PROGRESS_FIELD_NUMBER = 4;
    private float reduceProgress_;
    /**
     * optional float reduce_progress = 4;
     */
    public boolean hasReduceProgress() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * optional float reduce_progress = 4;
     */
    public float getReduceProgress() {
      return reduceProgress_;
    }

    // optional float cleanup_progress = 5;
    public static final int CLEANUP_PROGRESS_FIELD_NUMBER = 5;
    private float cleanupProgress_;
    /**
     * optional float cleanup_progress = 5;
     */
    public boolean hasCleanupProgress() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * optional float cleanup_progress = 5;
     */
    public float getCleanupProgress() {
      return cleanupProgress_;
    }

    // optional float setup_progress = 6;
    public static final int SETUP_PROGRESS_FIELD_NUMBER = 6;
    private float setupProgress_;
    /**
     * optional float setup_progress = 6;
     */
    public boolean hasSetupProgress() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * optional float setup_progress = 6;
     */
    public float getSetupProgress() {
      return setupProgress_;
    }

    // optional int64 start_time = 7;
    public static final int START_TIME_FIELD_NUMBER = 7;
    private long startTime_;
    /**
     * optional int64 start_time = 7;
     */
    public boolean hasStartTime() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * optional int64 start_time = 7;
     */
    public long getStartTime() {
      return startTime_;
    }

    // optional int64 finish_time = 8;
    public static final int FINISH_TIME_FIELD_NUMBER = 8;
    private long finishTime_;
    /**
     * optional int64 finish_time = 8;
     */
    public boolean hasFinishTime() {
      return ((bitField0_ & 0x00000080) == 0x00000080);
    }
    /**
     * optional int64 finish_time = 8;
     */
    public long getFinishTime() {
      return finishTime_;
    }

    // optional string user = 9;
    public static final int USER_FIELD_NUMBER = 9;
    private java.lang.Object user_;
    /**
     * optional string user = 9;
     */
    public boolean hasUser() {
      return ((bitField0_ & 0x00000100) == 0x00000100);
    }
    /**
     * optional string user = 9;
     */
    public java.lang.String getUser() {
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          user_ = s;
        }
        return s;
      }
    }
    /**
     * optional string user = 9;
     */
    public com.google.protobuf.ByteString
        getUserBytes() {
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        user_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
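    /*
     * getUser()/getUserBytes() implement the usual generated lazy-caching scheme:
     * the field is stored as either a String or a ByteString, and each accessor
     * converts and caches the other representation on first use (the String form
     * is only cached when the bytes are valid UTF-8). The jobName, trackingUrl,
     * diagnostics and jobFile accessors below follow the same pattern.
     */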

    // optional string jobName = 10;
    public static final int JOBNAME_FIELD_NUMBER = 10;
    private java.lang.Object jobName_;
    /**
     * optional string jobName = 10;
     */
    public boolean hasJobName() {
      return ((bitField0_ & 0x00000200) == 0x00000200);
    }
    /**
     * optional string jobName = 10;
     */
    public java.lang.String getJobName() {
      java.lang.Object ref = jobName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          jobName_ = s;
        }
        return s;
      }
    }
    /**
     * optional string jobName = 10;
     */
    public com.google.protobuf.ByteString
        getJobNameBytes() {
      java.lang.Object ref = jobName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        jobName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional string trackingUrl = 11;
    public static final int TRACKINGURL_FIELD_NUMBER = 11;
    private java.lang.Object trackingUrl_;
    /**
     * optional string trackingUrl = 11;
     */
    public boolean hasTrackingUrl() {
      return ((bitField0_ & 0x00000400) == 0x00000400);
    }
    /**
     * optional string trackingUrl = 11;
     */
    public java.lang.String getTrackingUrl() {
      java.lang.Object ref = trackingUrl_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          trackingUrl_ = s;
        }
        return s;
      }
    }
    /**
     * optional string trackingUrl = 11;
     */
    public com.google.protobuf.ByteString
        getTrackingUrlBytes() {
      java.lang.Object ref = trackingUrl_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        trackingUrl_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional string diagnostics = 12;
    public static final int DIAGNOSTICS_FIELD_NUMBER = 12;
    private java.lang.Object diagnostics_;
    /**
     * optional string diagnostics = 12;
     */
    public boolean hasDiagnostics() {
      return ((bitField0_ & 0x00000800) == 0x00000800);
    }
    /**
     * optional string diagnostics = 12;
     */
    public java.lang.String getDiagnostics() {
      java.lang.Object ref = diagnostics_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          diagnostics_ = s;
        }
        return s;
      }
    }
    /**
     * optional string diagnostics = 12;
     */
    public com.google.protobuf.ByteString
        getDiagnosticsBytes() {
      java.lang.Object ref = diagnostics_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        diagnostics_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional string jobFile = 13;
    public static final int JOBFILE_FIELD_NUMBER = 13;
    private java.lang.Object jobFile_;
    /**
     * optional string jobFile = 13;
     */
    public boolean hasJobFile() {
      return ((bitField0_ & 0x00001000) == 0x00001000);
    }
    /**
     * optional string jobFile = 13;
     */
    public java.lang.String getJobFile() {
      java.lang.Object ref = jobFile_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          jobFile_ = s;
        }
        return s;
      }
    }
    /**
     * optional string jobFile = 13;
     */
    public com.google.protobuf.ByteString
        getJobFileBytes() {
      java.lang.Object ref = jobFile_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        jobFile_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
    public static final int AM_INFOS_FIELD_NUMBER = 14;
    private java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto> amInfos_;
    /**
     * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
     */
    public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto> getAmInfosList() {
      return amInfos_;
    }
    /**
     * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
     */
    public java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProtoOrBuilder>
        getAmInfosOrBuilderList() {
      return amInfos_;
    }
    /**
     * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
     */
    public int getAmInfosCount() {
      return amInfos_.size();
    }
    /**
     * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto getAmInfos(int index) {
      return amInfos_.get(index);
    }
    /**
     * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProtoOrBuilder getAmInfosOrBuilder(
        int index) {
      return amInfos_.get(index);
    }

    // optional int64 submit_time = 15;
    public static final int SUBMIT_TIME_FIELD_NUMBER = 15;
    private long submitTime_;
    /**
     * optional int64 submit_time = 15;
     */
    public boolean hasSubmitTime() {
      return ((bitField0_ & 0x00002000) == 0x00002000);
    }
    /**
     * optional int64 submit_time = 15;
     */
    public long getSubmitTime() {
      return submitTime_;
    }

    // optional bool is_uber = 16 [default = false];
    public static final int IS_UBER_FIELD_NUMBER = 16;
    private boolean isUber_;
    /**
     * optional bool is_uber = 16 [default = false];
     */
    public boolean hasIsUber() {
      return ((bitField0_ & 0x00004000) == 0x00004000);
    }
    /**
     * optional bool is_uber = 16 [default = false];
     */
    public boolean getIsUber() {
      return isUber_;
    }

    private void initFields() {
      jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
      jobState_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobStateProto.J_NEW;
      mapProgress_ = 0F;
      reduceProgress_ = 0F;
      cleanupProgress_ = 0F;
      setupProgress_ = 0F;
      startTime_ = 0L;
      finishTime_ = 0L;
      user_ = "";
      jobName_ = "";
      trackingUrl_ = "";
      diagnostics_ = "";
      jobFile_ = "";
      amInfos_ = java.util.Collections.emptyList();
      submitTime_ = 0L;
      isUber_ = false;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
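    /*
     * Serialization: writeTo below emits only the fields whose presence bit is set,
     * in ascending field-number order, then appends any unknown fields; repeated
     * am_infos entries are written unconditionally. getSerializedSize mirrors the
     * same logic and memoizes its result.
     */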

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, jobId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeEnum(2, jobState_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeFloat(3, mapProgress_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeFloat(4, reduceProgress_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeFloat(5, cleanupProgress_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeFloat(6, setupProgress_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeInt64(7, startTime_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        output.writeInt64(8, finishTime_);
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        output.writeBytes(9, getUserBytes());
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        output.writeBytes(10, getJobNameBytes());
      }
      if (((bitField0_ & 0x00000400) == 0x00000400)) {
        output.writeBytes(11, getTrackingUrlBytes());
      }
      if (((bitField0_ & 0x00000800) == 0x00000800)) {
        output.writeBytes(12, getDiagnosticsBytes());
      }
      if (((bitField0_ & 0x00001000) == 0x00001000)) {
        output.writeBytes(13, getJobFileBytes());
      }
      for (int i = 0; i < amInfos_.size(); i++) {
        output.writeMessage(14, amInfos_.get(i));
      }
      if (((bitField0_ & 0x00002000) == 0x00002000)) {
        output.writeInt64(15, submitTime_);
      }
      if (((bitField0_ & 0x00004000) == 0x00004000)) {
        output.writeBool(16, isUber_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, jobId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(2, jobState_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeFloatSize(3, mapProgress_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeFloatSize(4, reduceProgress_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeFloatSize(5, cleanupProgress_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeFloatSize(6, setupProgress_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(7, startTime_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(8, finishTime_);
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(9, getUserBytes());
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(10, getJobNameBytes());
      }
      if (((bitField0_ & 0x00000400) == 0x00000400)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(11, getTrackingUrlBytes());
      }
      if (((bitField0_ & 0x00000800) == 0x00000800)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(12, getDiagnosticsBytes());
      }
      if (((bitField0_ & 0x00001000) == 0x00001000)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(13, getJobFileBytes());
      }
      for (int i = 0; i < amInfos_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(14, amInfos_.get(i));
      }
      if (((bitField0_ & 0x00002000) == 0x00002000)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(15, submitTime_);
      }
      if (((bitField0_ & 0x00004000) == 0x00004000)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(16, isUber_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto other = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto) obj;

      boolean result = true;
      result = result && (hasJobId() == other.hasJobId());
      if (hasJobId()) {
        result = result && getJobId()
            .equals(other.getJobId());
      }
      result = result && (hasJobState() == other.hasJobState());
      if (hasJobState()) {
        result = result &&
            (getJobState() == other.getJobState());
      }
      result = result && (hasMapProgress() == other.hasMapProgress());
      if (hasMapProgress()) {
        result = result && (Float.floatToIntBits(getMapProgress())    == Float.floatToIntBits(other.getMapProgress()));
      }
      result = result && (hasReduceProgress() == other.hasReduceProgress());
      if (hasReduceProgress()) {
        result = result && (Float.floatToIntBits(getReduceProgress())    == Float.floatToIntBits(other.getReduceProgress()));
      }
      result = result && (hasCleanupProgress() == other.hasCleanupProgress());
      if (hasCleanupProgress()) {
        result = result && (Float.floatToIntBits(getCleanupProgress())    == Float.floatToIntBits(other.getCleanupProgress()));
      }
      result = result && (hasSetupProgress() == other.hasSetupProgress());
      if (hasSetupProgress()) {
        result = result && (Float.floatToIntBits(getSetupProgress())    == Float.floatToIntBits(other.getSetupProgress()));
      }
      result = result && (hasStartTime() == other.hasStartTime());
      if (hasStartTime()) {
        result = result && (getStartTime()
            == other.getStartTime());
      }
      result = result && (hasFinishTime() == other.hasFinishTime());
      if (hasFinishTime()) {
        result = result && (getFinishTime()
            == other.getFinishTime());
      }
      result = result && (hasUser() == other.hasUser());
      if (hasUser()) {
        result = result && getUser()
            .equals(other.getUser());
      }
      result = result && (hasJobName() == other.hasJobName());
      if (hasJobName()) {
        result = result && getJobName()
            .equals(other.getJobName());
      }
      result = result && (hasTrackingUrl() == other.hasTrackingUrl());
      if (hasTrackingUrl()) {
        result = result && getTrackingUrl()
            .equals(other.getTrackingUrl());
      }
      result = result && (hasDiagnostics() == other.hasDiagnostics());
      if (hasDiagnostics()) {
        result = result && getDiagnostics()
            .equals(other.getDiagnostics());
      }
      result = result && (hasJobFile() == other.hasJobFile());
      if (hasJobFile()) {
        result = result && getJobFile()
            .equals(other.getJobFile());
      }
      result = result && getAmInfosList()
          .equals(other.getAmInfosList());
      result = result && (hasSubmitTime() == other.hasSubmitTime());
      if (hasSubmitTime()) {
        result = result && (getSubmitTime()
            == other.getSubmitTime());
      }
      result = result && (hasIsUber() == other.hasIsUber());
      if (hasIsUber()) {
        result = result && (getIsUber()
            == other.getIsUber());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasJobId()) {
        hash = (37 * hash) + JOB_ID_FIELD_NUMBER;
        hash = (53 * hash) + getJobId().hashCode();
      }
      if (hasJobState()) {
        hash = (37 * hash) + JOB_STATE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getJobState());
      }
      if (hasMapProgress()) {
        hash = (37 * hash) + MAP_PROGRESS_FIELD_NUMBER;
        hash = (53 * hash) + Float.floatToIntBits(
            getMapProgress());
      }
      if (hasReduceProgress()) {
        hash = (37 * hash) + REDUCE_PROGRESS_FIELD_NUMBER;
        hash = (53 * hash) + Float.floatToIntBits(
            getReduceProgress());
      }
      if (hasCleanupProgress()) {
        hash = (37 * hash) + CLEANUP_PROGRESS_FIELD_NUMBER;
        hash = (53 * hash) + Float.floatToIntBits(
            getCleanupProgress());
      }
      if (hasSetupProgress()) {
        hash = (37 * hash) + SETUP_PROGRESS_FIELD_NUMBER;
        hash = (53 * hash) + Float.floatToIntBits(
            getSetupProgress());
      }
      if (hasStartTime()) {
        hash = (37 * hash) + START_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getStartTime());
      }
      if (hasFinishTime()) {
        hash = (37 * hash) + FINISH_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getFinishTime());
      }
      if (hasUser()) {
        hash = (37 * hash) + USER_FIELD_NUMBER;
        hash = (53 * hash) + getUser().hashCode();
      }
      if (hasJobName()) {
        hash = (37 * hash) + JOBNAME_FIELD_NUMBER;
        hash = (53 * hash) + getJobName().hashCode();
      }
      if (hasTrackingUrl()) {
        hash = (37 * hash) + TRACKINGURL_FIELD_NUMBER;
        hash = (53 * hash) + getTrackingUrl().hashCode();
      }
      if (hasDiagnostics()) {
        hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER;
        hash = (53 * hash) + getDiagnostics().hashCode();
      }
      if (hasJobFile()) {
        hash = (37 * hash) + JOBFILE_FIELD_NUMBER;
        hash = (53 * hash) + getJobFile().hashCode();
      }
      if (getAmInfosCount() > 0) {
        hash = (37 * hash) + AM_INFOS_FIELD_NUMBER;
        hash = (53 * hash) + getAmInfosList().hashCode();
      }
      if (hasSubmitTime()) {
        hash = (37 * hash) + SUBMIT_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getSubmitTime());
      }
      if (hasIsUber()) {
        hash = (37 * hash) + IS_UBER_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getIsUber());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
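    /*
     * The static parseFrom/parseDelimitedFrom overloads below all delegate to
     * PARSER; the delimited variants expect the message to be preceded by a varint
     * length prefix, as written by writeDelimitedTo.
     */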

    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
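    /*
     * Typical construction path (illustrative sketch only; the field values are
     * hypothetical and not taken from this file):
     *
     *   MRProtos.JobReportProto report = MRProtos.JobReportProto.newBuilder()
     *       .setJobState(MRProtos.JobStateProto.J_NEW)
     *       .setMapProgress(0.0f)
     *       .setUser("someUser")
     *       .build();
     */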
    /**
     * Protobuf type {@code hadoop.mapreduce.JobReportProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_JobReportProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_JobReportProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getJobIdFieldBuilder();
          getAmInfosFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (jobIdBuilder_ == null) {
          jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
        } else {
          jobIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        jobState_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobStateProto.J_NEW;
        bitField0_ = (bitField0_ & ~0x00000002);
        mapProgress_ = 0F;
        bitField0_ = (bitField0_ & ~0x00000004);
        reduceProgress_ = 0F;
        bitField0_ = (bitField0_ & ~0x00000008);
        cleanupProgress_ = 0F;
        bitField0_ = (bitField0_ & ~0x00000010);
        setupProgress_ = 0F;
        bitField0_ = (bitField0_ & ~0x00000020);
        startTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000040);
        finishTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000080);
        user_ = "";
        bitField0_ = (bitField0_ & ~0x00000100);
        jobName_ = "";
        bitField0_ = (bitField0_ & ~0x00000200);
        trackingUrl_ = "";
        bitField0_ = (bitField0_ & ~0x00000400);
        diagnostics_ = "";
        bitField0_ = (bitField0_ & ~0x00000800);
        jobFile_ = "";
        bitField0_ = (bitField0_ & ~0x00001000);
        if (amInfosBuilder_ == null) {
          amInfos_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00002000);
        } else {
          amInfosBuilder_.clear();
        }
        submitTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00004000);
        isUber_ = false;
        bitField0_ = (bitField0_ & ~0x00008000);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_JobReportProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (jobIdBuilder_ == null) {
          result.jobId_ = jobId_;
        } else {
          result.jobId_ = jobIdBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.jobState_ = jobState_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.mapProgress_ = mapProgress_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.reduceProgress_ = reduceProgress_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.cleanupProgress_ = cleanupProgress_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        result.setupProgress_ = setupProgress_;
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000040;
        }
        result.startTime_ = startTime_;
        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
          to_bitField0_ |= 0x00000080;
        }
        result.finishTime_ = finishTime_;
        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
          to_bitField0_ |= 0x00000100;
        }
        result.user_ = user_;
        if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
          to_bitField0_ |= 0x00000200;
        }
        result.jobName_ = jobName_;
        if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
          to_bitField0_ |= 0x00000400;
        }
        result.trackingUrl_ = trackingUrl_;
        if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
          to_bitField0_ |= 0x00000800;
        }
        result.diagnostics_ = diagnostics_;
        if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
          to_bitField0_ |= 0x00001000;
        }
        result.jobFile_ = jobFile_;
        if (amInfosBuilder_ == null) {
          if (((bitField0_ & 0x00002000) == 0x00002000)) {
            amInfos_ = java.util.Collections.unmodifiableList(amInfos_);
            bitField0_ = (bitField0_ & ~0x00002000);
          }
          result.amInfos_ = amInfos_;
        } else {
          result.amInfos_ = amInfosBuilder_.build();
        }
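        // Note the bit remapping: the builder reserves 0x00002000 for the repeated
        // am_infos list (which has no presence bit in the message), so builder bit
        // 0x00004000 (submit_time) maps to message bit 0x00002000 and builder bit
        // 0x00008000 (is_uber) maps to 0x00004000.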
        if (((from_bitField0_ & 0x00004000) == 0x00004000)) {
          to_bitField0_ |= 0x00002000;
        }
        result.submitTime_ = submitTime_;
        if (((from_bitField0_ & 0x00008000) == 0x00008000)) {
          to_bitField0_ |= 0x00004000;
        }
        result.isUber_ = isUber_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
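      /*
       * Merge semantics of the typed mergeFrom below: scalar, enum and string
       * fields that are set in 'other' overwrite this builder's values, am_infos
       * entries from 'other' are appended to the existing list, and a set job_id
       * is merged recursively via mergeJobId.
       */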

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.getDefaultInstance()) return this;
        if (other.hasJobId()) {
          mergeJobId(other.getJobId());
        }
        if (other.hasJobState()) {
          setJobState(other.getJobState());
        }
        if (other.hasMapProgress()) {
          setMapProgress(other.getMapProgress());
        }
        if (other.hasReduceProgress()) {
          setReduceProgress(other.getReduceProgress());
        }
        if (other.hasCleanupProgress()) {
          setCleanupProgress(other.getCleanupProgress());
        }
        if (other.hasSetupProgress()) {
          setSetupProgress(other.getSetupProgress());
        }
        if (other.hasStartTime()) {
          setStartTime(other.getStartTime());
        }
        if (other.hasFinishTime()) {
          setFinishTime(other.getFinishTime());
        }
        if (other.hasUser()) {
          bitField0_ |= 0x00000100;
          user_ = other.user_;
          onChanged();
        }
        if (other.hasJobName()) {
          bitField0_ |= 0x00000200;
          jobName_ = other.jobName_;
          onChanged();
        }
        if (other.hasTrackingUrl()) {
          bitField0_ |= 0x00000400;
          trackingUrl_ = other.trackingUrl_;
          onChanged();
        }
        if (other.hasDiagnostics()) {
          bitField0_ |= 0x00000800;
          diagnostics_ = other.diagnostics_;
          onChanged();
        }
        if (other.hasJobFile()) {
          bitField0_ |= 0x00001000;
          jobFile_ = other.jobFile_;
          onChanged();
        }
        if (amInfosBuilder_ == null) {
          if (!other.amInfos_.isEmpty()) {
            if (amInfos_.isEmpty()) {
              amInfos_ = other.amInfos_;
              bitField0_ = (bitField0_ & ~0x00002000);
            } else {
              ensureAmInfosIsMutable();
              amInfos_.addAll(other.amInfos_);
            }
            onChanged();
          }
        } else {
          if (!other.amInfos_.isEmpty()) {
            if (amInfosBuilder_.isEmpty()) {
              amInfosBuilder_.dispose();
              amInfosBuilder_ = null;
              amInfos_ = other.amInfos_;
              bitField0_ = (bitField0_ & ~0x00002000);
              amInfosBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getAmInfosFieldBuilder() : null;
            } else {
              amInfosBuilder_.addAllMessages(other.amInfos_);
            }
          }
        }
        if (other.hasSubmitTime()) {
          setSubmitTime(other.getSubmitTime());
        }
        if (other.hasIsUber()) {
          setIsUber(other.getIsUber());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.JobIdProto job_id = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> jobIdBuilder_;
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public boolean hasJobId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
        if (jobIdBuilder_ == null) {
          return jobId_;
        } else {
          return jobIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder setJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
        if (jobIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          jobId_ = value;
          onChanged();
        } else {
          jobIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder setJobId(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder builderForValue) {
        if (jobIdBuilder_ == null) {
          jobId_ = builderForValue.build();
          onChanged();
        } else {
          jobIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder mergeJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
        if (jobIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              jobId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance()) {
            jobId_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.newBuilder(jobId_).mergeFrom(value).buildPartial();
          } else {
            jobId_ = value;
          }
          onChanged();
        } else {
          jobIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder clearJobId() {
        if (jobIdBuilder_ == null) {
          jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
          onChanged();
        } else {
          jobIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder getJobIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getJobIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
        if (jobIdBuilder_ != null) {
          return jobIdBuilder_.getMessageOrBuilder();
        } else {
          return jobId_;
        }
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> 
          getJobIdFieldBuilder() {
        if (jobIdBuilder_ == null) {
          jobIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>(
                  jobId_,
                  getParentForChildren(),
                  isClean());
          jobId_ = null;
        }
        return jobIdBuilder_;
      }
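      /*
       * The SingleFieldBuilder above is created lazily: until getJobIdFieldBuilder()
       * is first invoked (e.g. via getJobIdBuilder()), job_id is held as a plain
       * message in jobId_; afterwards jobIdBuilder_ becomes the single source of
       * truth and jobId_ is nulled out.
       */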

      // optional .hadoop.mapreduce.JobStateProto job_state = 2;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobStateProto jobState_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobStateProto.J_NEW;
      /**
       * optional .hadoop.mapreduce.JobStateProto job_state = 2;
       */
      public boolean hasJobState() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * optional .hadoop.mapreduce.JobStateProto job_state = 2;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobStateProto getJobState() {
        return jobState_;
      }
      /**
       * optional .hadoop.mapreduce.JobStateProto job_state = 2;
       */
      public Builder setJobState(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        jobState_ = value;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobStateProto job_state = 2;
       */
      public Builder clearJobState() {
        bitField0_ = (bitField0_ & ~0x00000002);
        jobState_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobStateProto.J_NEW;
        onChanged();
        return this;
      }

      // optional float map_progress = 3;
      private float mapProgress_ ;
      /**
       * optional float map_progress = 3;
       */
      public boolean hasMapProgress() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * optional float map_progress = 3;
       */
      public float getMapProgress() {
        return mapProgress_;
      }
      /**
       * optional float map_progress = 3;
       */
      public Builder setMapProgress(float value) {
        bitField0_ |= 0x00000004;
        mapProgress_ = value;
        onChanged();
        return this;
      }
      /**
       * optional float map_progress = 3;
       */
      public Builder clearMapProgress() {
        bitField0_ = (bitField0_ & ~0x00000004);
        mapProgress_ = 0F;
        onChanged();
        return this;
      }

      // optional float reduce_progress = 4;
      private float reduceProgress_ ;
      /**
       * optional float reduce_progress = 4;
       */
      public boolean hasReduceProgress() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * optional float reduce_progress = 4;
       */
      public float getReduceProgress() {
        return reduceProgress_;
      }
      /**
       * optional float reduce_progress = 4;
       */
      public Builder setReduceProgress(float value) {
        bitField0_ |= 0x00000008;
        reduceProgress_ = value;
        onChanged();
        return this;
      }
      /**
       * optional float reduce_progress = 4;
       */
      public Builder clearReduceProgress() {
        bitField0_ = (bitField0_ & ~0x00000008);
        reduceProgress_ = 0F;
        onChanged();
        return this;
      }

      // optional float cleanup_progress = 5;
      private float cleanupProgress_ ;
      /**
       * optional float cleanup_progress = 5;
       */
      public boolean hasCleanupProgress() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * optional float cleanup_progress = 5;
       */
      public float getCleanupProgress() {
        return cleanupProgress_;
      }
      /**
       * optional float cleanup_progress = 5;
       */
      public Builder setCleanupProgress(float value) {
        bitField0_ |= 0x00000010;
        cleanupProgress_ = value;
        onChanged();
        return this;
      }
      /**
       * optional float cleanup_progress = 5;
       */
      public Builder clearCleanupProgress() {
        bitField0_ = (bitField0_ & ~0x00000010);
        cleanupProgress_ = 0F;
        onChanged();
        return this;
      }

      // optional float setup_progress = 6;
      private float setupProgress_ ;
      /**
       * optional float setup_progress = 6;
       */
      public boolean hasSetupProgress() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * optional float setup_progress = 6;
       */
      public float getSetupProgress() {
        return setupProgress_;
      }
      /**
       * optional float setup_progress = 6;
       */
      public Builder setSetupProgress(float value) {
        bitField0_ |= 0x00000020;
        setupProgress_ = value;
        onChanged();
        return this;
      }
      /**
       * optional float setup_progress = 6;
       */
      public Builder clearSetupProgress() {
        bitField0_ = (bitField0_ & ~0x00000020);
        setupProgress_ = 0F;
        onChanged();
        return this;
      }

      // optional int64 start_time = 7;
      private long startTime_ ;
      /**
       * optional int64 start_time = 7;
       */
      public boolean hasStartTime() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      /**
       * optional int64 start_time = 7;
       */
      public long getStartTime() {
        return startTime_;
      }
      /**
       * optional int64 start_time = 7;
       */
      public Builder setStartTime(long value) {
        bitField0_ |= 0x00000040;
        startTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 start_time = 7;
       */
      public Builder clearStartTime() {
        bitField0_ = (bitField0_ & ~0x00000040);
        startTime_ = 0L;
        onChanged();
        return this;
      }

      // optional int64 finish_time = 8;
      private long finishTime_ ;
      /**
       * optional int64 finish_time = 8;
       */
      public boolean hasFinishTime() {
        return ((bitField0_ & 0x00000080) == 0x00000080);
      }
      /**
       * optional int64 finish_time = 8;
       */
      public long getFinishTime() {
        return finishTime_;
      }
      /**
       * optional int64 finish_time = 8;
       */
      public Builder setFinishTime(long value) {
        bitField0_ |= 0x00000080;
        finishTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 finish_time = 8;
       */
      public Builder clearFinishTime() {
        bitField0_ = (bitField0_ & ~0x00000080);
        finishTime_ = 0L;
        onChanged();
        return this;
      }

      // optional string user = 9;
      private java.lang.Object user_ = "";
      /**
       * optional string user = 9;
       */
      public boolean hasUser() {
        return ((bitField0_ & 0x00000100) == 0x00000100);
      }
      /**
       * optional string user = 9;
       */
      public java.lang.String getUser() {
        java.lang.Object ref = user_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          user_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string user = 9;
       */
      public com.google.protobuf.ByteString
          getUserBytes() {
        java.lang.Object ref = user_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          user_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string user = 9;
       */
      public Builder setUser(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000100;
        user_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string user = 9;
       */
      public Builder clearUser() {
        bitField0_ = (bitField0_ & ~0x00000100);
        user_ = getDefaultInstance().getUser();
        onChanged();
        return this;
      }
      /**
       * optional string user = 9;
       */
      public Builder setUserBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000100;
        user_ = value;
        onChanged();
        return this;
      }

      // optional string jobName = 10;
      private java.lang.Object jobName_ = "";
      /**
       * optional string jobName = 10;
       */
      public boolean hasJobName() {
        return ((bitField0_ & 0x00000200) == 0x00000200);
      }
      /**
       * optional string jobName = 10;
       */
      public java.lang.String getJobName() {
        java.lang.Object ref = jobName_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          jobName_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string jobName = 10;
       */
      public com.google.protobuf.ByteString
          getJobNameBytes() {
        java.lang.Object ref = jobName_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          jobName_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string jobName = 10;
       */
      public Builder setJobName(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000200;
        jobName_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string jobName = 10;
       */
      public Builder clearJobName() {
        bitField0_ = (bitField0_ & ~0x00000200);
        jobName_ = getDefaultInstance().getJobName();
        onChanged();
        return this;
      }
      /**
       * optional string jobName = 10;
       */
      public Builder setJobNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000200;
        jobName_ = value;
        onChanged();
        return this;
      }

      // optional string trackingUrl = 11;
      private java.lang.Object trackingUrl_ = "";
      /**
       * optional string trackingUrl = 11;
       */
      public boolean hasTrackingUrl() {
        return ((bitField0_ & 0x00000400) == 0x00000400);
      }
      /**
       * optional string trackingUrl = 11;
       */
      public java.lang.String getTrackingUrl() {
        java.lang.Object ref = trackingUrl_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          trackingUrl_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string trackingUrl = 11;
       */
      public com.google.protobuf.ByteString
          getTrackingUrlBytes() {
        java.lang.Object ref = trackingUrl_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          trackingUrl_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string trackingUrl = 11;
       */
      public Builder setTrackingUrl(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000400;
        trackingUrl_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string trackingUrl = 11;
       */
      public Builder clearTrackingUrl() {
        bitField0_ = (bitField0_ & ~0x00000400);
        trackingUrl_ = getDefaultInstance().getTrackingUrl();
        onChanged();
        return this;
      }
      /**
       * optional string trackingUrl = 11;
       */
      public Builder setTrackingUrlBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000400;
        trackingUrl_ = value;
        onChanged();
        return this;
      }

      // optional string diagnostics = 12;
      private java.lang.Object diagnostics_ = "";
      /**
       * optional string diagnostics = 12;
       */
      public boolean hasDiagnostics() {
        return ((bitField0_ & 0x00000800) == 0x00000800);
      }
      /**
       * optional string diagnostics = 12;
       */
      public java.lang.String getDiagnostics() {
        java.lang.Object ref = diagnostics_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          diagnostics_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string diagnostics = 12;
       */
      public com.google.protobuf.ByteString
          getDiagnosticsBytes() {
        java.lang.Object ref = diagnostics_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          diagnostics_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string diagnostics = 12;
       */
      public Builder setDiagnostics(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000800;
        diagnostics_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string diagnostics = 12;
       */
      public Builder clearDiagnostics() {
        bitField0_ = (bitField0_ & ~0x00000800);
        diagnostics_ = getDefaultInstance().getDiagnostics();
        onChanged();
        return this;
      }
      /**
       * optional string diagnostics = 12;
       */
      public Builder setDiagnosticsBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000800;
        diagnostics_ = value;
        onChanged();
        return this;
      }

      // optional string jobFile = 13;
      private java.lang.Object jobFile_ = "";
      /**
       * optional string jobFile = 13;
       */
      public boolean hasJobFile() {
        return ((bitField0_ & 0x00001000) == 0x00001000);
      }
      /**
       * optional string jobFile = 13;
       */
      public java.lang.String getJobFile() {
        java.lang.Object ref = jobFile_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          jobFile_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string jobFile = 13;
       */
      public com.google.protobuf.ByteString
          getJobFileBytes() {
        java.lang.Object ref = jobFile_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          jobFile_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string jobFile = 13;
       */
      public Builder setJobFile(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00001000;
        jobFile_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string jobFile = 13;
       */
      public Builder clearJobFile() {
        bitField0_ = (bitField0_ & ~0x00001000);
        jobFile_ = getDefaultInstance().getJobFile();
        onChanged();
        return this;
      }
      /**
       * optional string jobFile = 13;
       */
      public Builder setJobFileBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00001000;
        jobFile_ = value;
        onChanged();
        return this;
      }
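
      // Usage sketch (illustrative; not emitted by protoc): every optional string
      // field above follows the same pattern -- setX(String) / setXBytes(ByteString)
      // store the value and flip a presence bit in bitField0_, while clearX() clears
      // that bit and restores the default from getDefaultInstance(). Assuming the
      // enclosing builder is obtained via JobReportProto.newBuilder():
      //
      //   JobReportProto.Builder b = JobReportProto.newBuilder()
      //       .setUser("alice")           // sets presence bit 0x00000100
      //       .setJobName("wordcount");   // sets presence bit 0x00000200
      //   b.clearUser();                  // hasUser() is false again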

      // repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
      private java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto> amInfos_ =
        java.util.Collections.emptyList();
      private void ensureAmInfosIsMutable() {
        if (!((bitField0_ & 0x00002000) == 0x00002000)) {
          amInfos_ = new java.util.ArrayList<org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto>(amInfos_);
          bitField0_ |= 0x00002000;
         }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProtoOrBuilder> amInfosBuilder_;

      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto> getAmInfosList() {
        if (amInfosBuilder_ == null) {
          return java.util.Collections.unmodifiableList(amInfos_);
        } else {
          return amInfosBuilder_.getMessageList();
        }
      }
      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public int getAmInfosCount() {
        if (amInfosBuilder_ == null) {
          return amInfos_.size();
        } else {
          return amInfosBuilder_.getCount();
        }
      }
      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto getAmInfos(int index) {
        if (amInfosBuilder_ == null) {
          return amInfos_.get(index);
        } else {
          return amInfosBuilder_.getMessage(index);
        }
      }
      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public Builder setAmInfos(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto value) {
        if (amInfosBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAmInfosIsMutable();
          amInfos_.set(index, value);
          onChanged();
        } else {
          amInfosBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public Builder setAmInfos(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.Builder builderForValue) {
        if (amInfosBuilder_ == null) {
          ensureAmInfosIsMutable();
          amInfos_.set(index, builderForValue.build());
          onChanged();
        } else {
          amInfosBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public Builder addAmInfos(org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto value) {
        if (amInfosBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAmInfosIsMutable();
          amInfos_.add(value);
          onChanged();
        } else {
          amInfosBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public Builder addAmInfos(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto value) {
        if (amInfosBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAmInfosIsMutable();
          amInfos_.add(index, value);
          onChanged();
        } else {
          amInfosBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public Builder addAmInfos(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.Builder builderForValue) {
        if (amInfosBuilder_ == null) {
          ensureAmInfosIsMutable();
          amInfos_.add(builderForValue.build());
          onChanged();
        } else {
          amInfosBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public Builder addAmInfos(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.Builder builderForValue) {
        if (amInfosBuilder_ == null) {
          ensureAmInfosIsMutable();
          amInfos_.add(index, builderForValue.build());
          onChanged();
        } else {
          amInfosBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public Builder addAllAmInfos(
          java.lang.Iterable<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto> values) {
        if (amInfosBuilder_ == null) {
          ensureAmInfosIsMutable();
          super.addAll(values, amInfos_);
          onChanged();
        } else {
          amInfosBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public Builder clearAmInfos() {
        if (amInfosBuilder_ == null) {
          amInfos_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00002000);
          onChanged();
        } else {
          amInfosBuilder_.clear();
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public Builder removeAmInfos(int index) {
        if (amInfosBuilder_ == null) {
          ensureAmInfosIsMutable();
          amInfos_.remove(index);
          onChanged();
        } else {
          amInfosBuilder_.remove(index);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.Builder getAmInfosBuilder(
          int index) {
        return getAmInfosFieldBuilder().getBuilder(index);
      }
      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProtoOrBuilder getAmInfosOrBuilder(
          int index) {
        if (amInfosBuilder_ == null) {
          return amInfos_.get(index);
        } else {
          return amInfosBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProtoOrBuilder>
           getAmInfosOrBuilderList() {
        if (amInfosBuilder_ != null) {
          return amInfosBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(amInfos_);
        }
      }
      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.Builder addAmInfosBuilder() {
        return getAmInfosFieldBuilder().addBuilder(
            org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.Builder addAmInfosBuilder(
          int index) {
        return getAmInfosFieldBuilder().addBuilder(
            index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
       */
      public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.Builder>
           getAmInfosBuilderList() {
        return getAmInfosFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProtoOrBuilder> 
          getAmInfosFieldBuilder() {
        if (amInfosBuilder_ == null) {
          amInfosBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProtoOrBuilder>(
                  amInfos_,
                  ((bitField0_ & 0x00002000) == 0x00002000),
                  getParentForChildren(),
                  isClean());
          amInfos_ = null;
        }
        return amInfosBuilder_;
      }
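
      // Usage sketch (illustrative; not emitted by protoc): the repeated am_infos
      // field is backed by the plain list amInfos_ until a builder view is requested,
      // at which point getAmInfosFieldBuilder() above swaps in amInfosBuilder_ and all
      // later calls go through the RepeatedFieldBuilder. Assuming a
      // JobReportProto.Builder named "report":
      //
      //   report.addAmInfos(MRProtos.AMInfoProto.newBuilder()
      //       .setStartTime(System.currentTimeMillis())
      //       .build());                      // list-backed path
      //   report.addAmInfosBuilder()          // appends a second element and switches
      //       .setNodeManagerHost("nm-host"); // to the RepeatedFieldBuilder path
      //   int count = report.getAmInfosCount();  // == 2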

      // optional int64 submit_time = 15;
      private long submitTime_ ;
      /**
       * optional int64 submit_time = 15;
       */
      public boolean hasSubmitTime() {
        return ((bitField0_ & 0x00004000) == 0x00004000);
      }
      /**
       * optional int64 submit_time = 15;
       */
      public long getSubmitTime() {
        return submitTime_;
      }
      /**
       * optional int64 submit_time = 15;
       */
      public Builder setSubmitTime(long value) {
        bitField0_ |= 0x00004000;
        submitTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 submit_time = 15;
       */
      public Builder clearSubmitTime() {
        bitField0_ = (bitField0_ & ~0x00004000);
        submitTime_ = 0L;
        onChanged();
        return this;
      }

      // optional bool is_uber = 16 [default = false];
      private boolean isUber_ ;
      /**
       * optional bool is_uber = 16 [default = false];
       */
      public boolean hasIsUber() {
        return ((bitField0_ & 0x00008000) == 0x00008000);
      }
      /**
       * optional bool is_uber = 16 [default = false];
       */
      public boolean getIsUber() {
        return isUber_;
      }
      /**
       * optional bool is_uber = 16 [default = false];
       */
      public Builder setIsUber(boolean value) {
        bitField0_ |= 0x00008000;
        isUber_ = value;
        onChanged();
        return this;
      }
      /**
       * optional bool is_uber = 16 [default = false];
       */
      public Builder clearIsUber() {
        bitField0_ = (bitField0_ & ~0x00008000);
        isUber_ = false;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.JobReportProto)
    }

    static {
      defaultInstance = new JobReportProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.JobReportProto)
  }
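
  // Usage sketch (illustrative; not emitted by protoc): a minimal example of
  // assembling a JobReportProto with the builder methods shown above; the field
  // values are hypothetical, and build() is assumed to follow the same generated
  // pattern as AMInfoProto.Builder.build() below.
  //
  //   MRProtos.JobReportProto report = MRProtos.JobReportProto.newBuilder()
  //       .setTrackingUrl("http://rm-host:8088/proxy/app_0000/")
  //       .setSubmitTime(1700000000000L)
  //       .setIsUber(false)
  //       .build();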

  public interface AMInfoProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    boolean hasApplicationAttemptId();
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId();
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder();

    // optional int64 start_time = 2;
    /**
     * optional int64 start_time = 2;
     */
    boolean hasStartTime();
    /**
     * optional int64 start_time = 2;
     */
    long getStartTime();

    // optional .hadoop.yarn.ContainerIdProto container_id = 3;
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 3;
     */
    boolean hasContainerId();
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 3;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 3;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();

    // optional string node_manager_host = 4;
    /**
     * optional string node_manager_host = 4;
     */
    boolean hasNodeManagerHost();
    /**
     * optional string node_manager_host = 4;
     */
    java.lang.String getNodeManagerHost();
    /**
     * optional string node_manager_host = 4;
     */
    com.google.protobuf.ByteString
        getNodeManagerHostBytes();

    // optional int32 node_manager_port = 5;
    /**
     * optional int32 node_manager_port = 5;
     */
    boolean hasNodeManagerPort();
    /**
     * optional int32 node_manager_port = 5;
     */
    int getNodeManagerPort();

    // optional int32 node_manager_http_port = 6;
    /**
     * optional int32 node_manager_http_port = 6;
     */
    boolean hasNodeManagerHttpPort();
    /**
     * optional int32 node_manager_http_port = 6;
     */
    int getNodeManagerHttpPort();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.AMInfoProto}
   */
  public static final class AMInfoProto extends
      com.google.protobuf.GeneratedMessage
      implements AMInfoProtoOrBuilder {
    // Use AMInfoProto.newBuilder() to construct.
    private AMInfoProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private AMInfoProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final AMInfoProto defaultInstance;
    public static AMInfoProto getDefaultInstance() {
      return defaultInstance;
    }

    public AMInfoProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private AMInfoProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = applicationAttemptId_.toBuilder();
              }
              applicationAttemptId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(applicationAttemptId_);
                applicationAttemptId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              startTime_ = input.readInt64();
              break;
            }
            case 26: {
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000004) == 0x00000004)) {
                subBuilder = containerId_.toBuilder();
              }
              containerId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(containerId_);
                containerId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000004;
              break;
            }
            case 34: {
              bitField0_ |= 0x00000008;
              nodeManagerHost_ = input.readBytes();
              break;
            }
            case 40: {
              bitField0_ |= 0x00000010;
              nodeManagerPort_ = input.readInt32();
              break;
            }
            case 48: {
              bitField0_ |= 0x00000020;
              nodeManagerHttpPort_ = input.readInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
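    // Note (illustrative; not emitted by protoc): the switch in the constructor above
    // keys on protobuf wire tags, where tag = (field_number << 3) | wire_type. For
    // example, case 10 is field 1 (application_attempt_id, length-delimited), case 16
    // is field 2 (start_time, varint), and case 34 is field 4 (node_manager_host,
    // length-delimited); tag 0 marks end of input.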
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_AMInfoProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_AMInfoProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.Builder.class);
    }

    public static com.google.protobuf.Parser<AMInfoProto> PARSER =
        new com.google.protobuf.AbstractParser<AMInfoProto>() {
      public AMInfoProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new AMInfoProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<AMInfoProto> getParserForType() {
      return PARSER;
    }
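
    // Usage sketch (illustrative; not emitted by protoc): PARSER is the shared parser
    // instance behind the static parseFrom overloads further down. Calling it directly,
    // assuming "data" holds a serialized AMInfoProto:
    //
    //   MRProtos.AMInfoProto info = MRProtos.AMInfoProto.PARSER.parseFrom(data);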

    private int bitField0_;
    // optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
    public static final int APPLICATION_ATTEMPT_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_;
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    public boolean hasApplicationAttemptId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() {
      return applicationAttemptId_;
    }
    /**
     * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() {
      return applicationAttemptId_;
    }

    // optional int64 start_time = 2;
    public static final int START_TIME_FIELD_NUMBER = 2;
    private long startTime_;
    /**
     * optional int64 start_time = 2;
     */
    public boolean hasStartTime() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * optional int64 start_time = 2;
     */
    public long getStartTime() {
      return startTime_;
    }

    // optional .hadoop.yarn.ContainerIdProto container_id = 3;
    public static final int CONTAINER_ID_FIELD_NUMBER = 3;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 3;
     */
    public boolean hasContainerId() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 3;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
      return containerId_;
    }
    /**
     * optional .hadoop.yarn.ContainerIdProto container_id = 3;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
      return containerId_;
    }

    // optional string node_manager_host = 4;
    public static final int NODE_MANAGER_HOST_FIELD_NUMBER = 4;
    private java.lang.Object nodeManagerHost_;
    /**
     * optional string node_manager_host = 4;
     */
    public boolean hasNodeManagerHost() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * optional string node_manager_host = 4;
     */
    public java.lang.String getNodeManagerHost() {
      java.lang.Object ref = nodeManagerHost_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          nodeManagerHost_ = s;
        }
        return s;
      }
    }
    /**
     * optional string node_manager_host = 4;
     */
    public com.google.protobuf.ByteString
        getNodeManagerHostBytes() {
      java.lang.Object ref = nodeManagerHost_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        nodeManagerHost_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional int32 node_manager_port = 5;
    public static final int NODE_MANAGER_PORT_FIELD_NUMBER = 5;
    private int nodeManagerPort_;
    /**
     * optional int32 node_manager_port = 5;
     */
    public boolean hasNodeManagerPort() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * optional int32 node_manager_port = 5;
     */
    public int getNodeManagerPort() {
      return nodeManagerPort_;
    }

    // optional int32 node_manager_http_port = 6;
    public static final int NODE_MANAGER_HTTP_PORT_FIELD_NUMBER = 6;
    private int nodeManagerHttpPort_;
    /**
     * optional int32 node_manager_http_port = 6;
     */
    public boolean hasNodeManagerHttpPort() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * optional int32 node_manager_http_port = 6;
     */
    public int getNodeManagerHttpPort() {
      return nodeManagerHttpPort_;
    }

    private void initFields() {
      applicationAttemptId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance();
      startTime_ = 0L;
      containerId_ = org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance();
      nodeManagerHost_ = "";
      nodeManagerPort_ = 0;
      nodeManagerHttpPort_ = 0;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, applicationAttemptId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeInt64(2, startTime_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeMessage(3, containerId_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBytes(4, getNodeManagerHostBytes());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeInt32(5, nodeManagerPort_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeInt32(6, nodeManagerHttpPort_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, applicationAttemptId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(2, startTime_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, containerId_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(4, getNodeManagerHostBytes());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(5, nodeManagerPort_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(6, nodeManagerHttpPort_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
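
    // Usage sketch (illustrative; not emitted by protoc): writeTo and getSerializedSize
    // only cover fields whose presence bit is set in bitField0_, so a message with no
    // fields set serializes to just its unknown fields. A round trip using the
    // inherited toByteString():
    //
    //   MRProtos.AMInfoProto original = MRProtos.AMInfoProto.newBuilder()
    //       .setNodeManagerPort(45454)
    //       .build();
    //   MRProtos.AMInfoProto copy =
    //       MRProtos.AMInfoProto.parseFrom(original.toByteString());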

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto other = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto) obj;

      boolean result = true;
      result = result && (hasApplicationAttemptId() == other.hasApplicationAttemptId());
      if (hasApplicationAttemptId()) {
        result = result && getApplicationAttemptId()
            .equals(other.getApplicationAttemptId());
      }
      result = result && (hasStartTime() == other.hasStartTime());
      if (hasStartTime()) {
        result = result && (getStartTime()
            == other.getStartTime());
      }
      result = result && (hasContainerId() == other.hasContainerId());
      if (hasContainerId()) {
        result = result && getContainerId()
            .equals(other.getContainerId());
      }
      result = result && (hasNodeManagerHost() == other.hasNodeManagerHost());
      if (hasNodeManagerHost()) {
        result = result && getNodeManagerHost()
            .equals(other.getNodeManagerHost());
      }
      result = result && (hasNodeManagerPort() == other.hasNodeManagerPort());
      if (hasNodeManagerPort()) {
        result = result && (getNodeManagerPort()
            == other.getNodeManagerPort());
      }
      result = result && (hasNodeManagerHttpPort() == other.hasNodeManagerHttpPort());
      if (hasNodeManagerHttpPort()) {
        result = result && (getNodeManagerHttpPort()
            == other.getNodeManagerHttpPort());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasApplicationAttemptId()) {
        hash = (37 * hash) + APPLICATION_ATTEMPT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationAttemptId().hashCode();
      }
      if (hasStartTime()) {
        hash = (37 * hash) + START_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getStartTime());
      }
      if (hasContainerId()) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerId().hashCode();
      }
      if (hasNodeManagerHost()) {
        hash = (37 * hash) + NODE_MANAGER_HOST_FIELD_NUMBER;
        hash = (53 * hash) + getNodeManagerHost().hashCode();
      }
      if (hasNodeManagerPort()) {
        hash = (37 * hash) + NODE_MANAGER_PORT_FIELD_NUMBER;
        hash = (53 * hash) + getNodeManagerPort();
      }
      if (hasNodeManagerHttpPort()) {
        hash = (37 * hash) + NODE_MANAGER_HTTP_PORT_FIELD_NUMBER;
        hash = (53 * hash) + getNodeManagerHttpPort();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
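
    // Usage sketch (illustrative; not emitted by protoc): the parseFrom overloads above
    // consume exactly one message per call, while parseDelimitedFrom reads a
    // length-prefixed record and returns null at end of stream, so it can be looped
    // over an InputStream ("in", hypothetical) written with writeDelimitedTo:
    //
    //   MRProtos.AMInfoProto next;
    //   while ((next = MRProtos.AMInfoProto.parseDelimitedFrom(in)) != null) {
    //     // process next
    //   }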

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.AMInfoProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_AMInfoProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_AMInfoProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getApplicationAttemptIdFieldBuilder();
          getContainerIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance();
        } else {
          applicationAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        startTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        if (containerIdBuilder_ == null) {
          containerId_ = org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance();
        } else {
          containerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        nodeManagerHost_ = "";
        bitField0_ = (bitField0_ & ~0x00000008);
        nodeManagerPort_ = 0;
        bitField0_ = (bitField0_ & ~0x00000010);
        nodeManagerHttpPort_ = 0;
        bitField0_ = (bitField0_ & ~0x00000020);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_AMInfoProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (applicationAttemptIdBuilder_ == null) {
          result.applicationAttemptId_ = applicationAttemptId_;
        } else {
          result.applicationAttemptId_ = applicationAttemptIdBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.startTime_ = startTime_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        if (containerIdBuilder_ == null) {
          result.containerId_ = containerId_;
        } else {
          result.containerId_ = containerIdBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.nodeManagerHost_ = nodeManagerHost_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.nodeManagerPort_ = nodeManagerPort_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        result.nodeManagerHttpPort_ = nodeManagerHttpPort_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.getDefaultInstance()) return this;
        if (other.hasApplicationAttemptId()) {
          mergeApplicationAttemptId(other.getApplicationAttemptId());
        }
        if (other.hasStartTime()) {
          setStartTime(other.getStartTime());
        }
        if (other.hasContainerId()) {
          mergeContainerId(other.getContainerId());
        }
        if (other.hasNodeManagerHost()) {
          bitField0_ |= 0x00000008;
          nodeManagerHost_ = other.nodeManagerHost_;
          onChanged();
        }
        if (other.hasNodeManagerPort()) {
          setNodeManagerPort(other.getNodeManagerPort());
        }
        if (other.hasNodeManagerHttpPort()) {
          setNodeManagerHttpPort(other.getNodeManagerHttpPort());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
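
      // Usage sketch (illustrative; not emitted by protoc): mergeFrom(other) only
      // copies fields that are set on "other", so it can overlay a partial update on
      // an existing message ("base" and "partialUpdate" are hypothetical instances):
      //
      //   MRProtos.AMInfoProto merged = MRProtos.AMInfoProto.newBuilder(base)
      //       .mergeFrom(partialUpdate)
      //       .build();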

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> applicationAttemptIdBuilder_;
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public boolean hasApplicationAttemptId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() {
        if (applicationAttemptIdBuilder_ == null) {
          return applicationAttemptId_;
        } else {
          return applicationAttemptIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public Builder setApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (applicationAttemptIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationAttemptId_ = value;
          onChanged();
        } else {
          applicationAttemptIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public Builder setApplicationAttemptId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptId_ = builderForValue.build();
          onChanged();
        } else {
          applicationAttemptIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public Builder mergeApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (applicationAttemptIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              applicationAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) {
            applicationAttemptId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.newBuilder(applicationAttemptId_).mergeFrom(value).buildPartial();
          } else {
            applicationAttemptId_ = value;
          }
          onChanged();
        } else {
          applicationAttemptIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public Builder clearApplicationAttemptId() {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance();
          onChanged();
        } else {
          applicationAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getApplicationAttemptIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationAttemptIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() {
        if (applicationAttemptIdBuilder_ != null) {
          return applicationAttemptIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationAttemptId_;
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> 
          getApplicationAttemptIdFieldBuilder() {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>(
                  applicationAttemptId_,
                  getParentForChildren(),
                  isClean());
          applicationAttemptId_ = null;
        }
        return applicationAttemptIdBuilder_;
      }

      // optional int64 start_time = 2;
      private long startTime_ ;
      /**
       * optional int64 start_time = 2;
       */
      public boolean hasStartTime() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * optional int64 start_time = 2;
       */
      public long getStartTime() {
        return startTime_;
      }
      /**
       * optional int64 start_time = 2;
       */
      public Builder setStartTime(long value) {
        bitField0_ |= 0x00000002;
        startTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int64 start_time = 2;
       */
      public Builder clearStartTime() {
        bitField0_ = (bitField0_ & ~0x00000002);
        startTime_ = 0L;
        onChanged();
        return this;
      }

      // optional .hadoop.yarn.ContainerIdProto container_id = 3;
      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_ = org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 3;
       */
      public boolean hasContainerId() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 3;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
        if (containerIdBuilder_ == null) {
          return containerId_;
        } else {
          return containerIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 3;
       */
      public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerId_ = value;
          onChanged();
        } else {
          containerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 3;
       */
      public Builder setContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          containerId_ = builderForValue.build();
          onChanged();
        } else {
          containerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 3;
       */
      public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000004) == 0x00000004) &&
              containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            containerId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.newBuilder(containerId_).mergeFrom(value).buildPartial();
          } else {
            containerId_ = value;
          }
          onChanged();
        } else {
          containerIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 3;
       */
      public Builder clearContainerId() {
        if (containerIdBuilder_ == null) {
          containerId_ = org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance();
          onChanged();
        } else {
          containerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 3;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
        bitField0_ |= 0x00000004;
        onChanged();
        return getContainerIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 3;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilder();
        } else {
          return containerId_;
        }
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 3;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getContainerIdFieldBuilder() {
        if (containerIdBuilder_ == null) {
          containerIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  containerId_,
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }

      // optional string node_manager_host = 4;
      private java.lang.Object nodeManagerHost_ = "";
      /**
       * optional string node_manager_host = 4;
       */
      public boolean hasNodeManagerHost() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * optional string node_manager_host = 4;
       */
      public java.lang.String getNodeManagerHost() {
        java.lang.Object ref = nodeManagerHost_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          nodeManagerHost_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string node_manager_host = 4;
       */
      public com.google.protobuf.ByteString
          getNodeManagerHostBytes() {
        java.lang.Object ref = nodeManagerHost_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          nodeManagerHost_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string node_manager_host = 4;
       */
      public Builder setNodeManagerHost(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        nodeManagerHost_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string node_manager_host = 4;
       */
      public Builder clearNodeManagerHost() {
        bitField0_ = (bitField0_ & ~0x00000008);
        nodeManagerHost_ = getDefaultInstance().getNodeManagerHost();
        onChanged();
        return this;
      }
      /**
       * optional string node_manager_host = 4;
       */
      public Builder setNodeManagerHostBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        nodeManagerHost_ = value;
        onChanged();
        return this;
      }

      // optional int32 node_manager_port = 5;
      private int nodeManagerPort_ ;
      /**
       * optional int32 node_manager_port = 5;
       */
      public boolean hasNodeManagerPort() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * optional int32 node_manager_port = 5;
       */
      public int getNodeManagerPort() {
        return nodeManagerPort_;
      }
      /**
       * optional int32 node_manager_port = 5;
       */
      public Builder setNodeManagerPort(int value) {
        bitField0_ |= 0x00000010;
        nodeManagerPort_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 node_manager_port = 5;
       */
      public Builder clearNodeManagerPort() {
        bitField0_ = (bitField0_ & ~0x00000010);
        nodeManagerPort_ = 0;
        onChanged();
        return this;
      }

      // optional int32 node_manager_http_port = 6;
      private int nodeManagerHttpPort_ ;
      /**
       * optional int32 node_manager_http_port = 6;
       */
      public boolean hasNodeManagerHttpPort() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * optional int32 node_manager_http_port = 6;
       */
      public int getNodeManagerHttpPort() {
        return nodeManagerHttpPort_;
      }
      /**
       * optional int32 node_manager_http_port = 6;
       */
      public Builder setNodeManagerHttpPort(int value) {
        bitField0_ |= 0x00000020;
        nodeManagerHttpPort_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 node_manager_http_port = 6;
       */
      public Builder clearNodeManagerHttpPort() {
        bitField0_ = (bitField0_ & ~0x00000020);
        nodeManagerHttpPort_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.AMInfoProto)
    }

    static {
      defaultInstance = new AMInfoProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.AMInfoProto)
  }
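
  // Illustrative sketch of typical AMInfoProto construction through the Builder above;
  // the host and port values are hypothetical, and only fields shown in this section
  // are set here:
  //
  //   org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto amInfo =
  //       org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto.newBuilder()
  //           .setNodeManagerHost("nm.example.com")
  //           .setNodeManagerPort(45454)
  //           .setNodeManagerHttpPort(8042)
  //           .build();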

  public interface TaskAttemptCompletionEventProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
     */
    boolean hasAttemptId();
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getAttemptId();
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getAttemptIdOrBuilder();

    // optional .hadoop.mapreduce.TaskAttemptCompletionEventStatusProto status = 2;
    /**
     * optional .hadoop.mapreduce.TaskAttemptCompletionEventStatusProto status = 2;
     */
    boolean hasStatus();
    /**
     * optional .hadoop.mapreduce.TaskAttemptCompletionEventStatusProto status = 2;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventStatusProto getStatus();

    // optional string map_output_server_address = 3;
    /**
     * optional string map_output_server_address = 3;
     */
    boolean hasMapOutputServerAddress();
    /**
     * optional string map_output_server_address = 3;
     */
    java.lang.String getMapOutputServerAddress();
    /**
     * optional string map_output_server_address = 3;
     */
    com.google.protobuf.ByteString
        getMapOutputServerAddressBytes();

    // optional int32 attempt_run_time = 4;
    /**
     * optional int32 attempt_run_time = 4;
     */
    boolean hasAttemptRunTime();
    /**
     * optional int32 attempt_run_time = 4;
     */
    int getAttemptRunTime();

    // optional int32 event_id = 5;
    /**
     * optional int32 event_id = 5;
     */
    boolean hasEventId();
    /**
     * optional int32 event_id = 5;
     */
    int getEventId();
  }
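
  // The OrBuilder interface above is implemented by both the immutable
  // TaskAttemptCompletionEventProto and its Builder, so read-only helpers can accept
  // either form, e.g. (illustrative only):
  //
  //   static java.lang.String describe(
  //       org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder e) {
  //     return e.hasEventId() ? ("event " + e.getEventId()) : "event id unset";
  //   }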
  /**
   * Protobuf type {@code hadoop.mapreduce.TaskAttemptCompletionEventProto}
   */
  public static final class TaskAttemptCompletionEventProto extends
      com.google.protobuf.GeneratedMessage
      implements TaskAttemptCompletionEventProtoOrBuilder {
    // Use TaskAttemptCompletionEventProto.newBuilder() to construct.
    private TaskAttemptCompletionEventProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private TaskAttemptCompletionEventProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final TaskAttemptCompletionEventProto defaultInstance;
    public static TaskAttemptCompletionEventProto getDefaultInstance() {
      return defaultInstance;
    }

    public TaskAttemptCompletionEventProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private TaskAttemptCompletionEventProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = attemptId_.toBuilder();
              }
              attemptId_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(attemptId_);
                attemptId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              int rawValue = input.readEnum();
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventStatusProto value = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventStatusProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(2, rawValue);
              } else {
                bitField0_ |= 0x00000002;
                status_ = value;
              }
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              mapOutputServerAddress_ = input.readBytes();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              attemptRunTime_ = input.readInt32();
              break;
            }
            case 40: {
              bitField0_ |= 0x00000010;
              eventId_ = input.readInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
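
    // In the parsing constructor above, each case label is a protobuf wire tag
    // (field_number << 3 | wire_type): tags 10 and 26 are the length-delimited fields 1
    // and 3, while tags 16, 32 and 40 are the varint-encoded fields 2, 4 and 5.
    // Unrecognized tags fall through to parseUnknownField() and are kept in unknownFields.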
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskAttemptCompletionEventProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskAttemptCompletionEventProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder.class);
    }

    public static com.google.protobuf.Parser<TaskAttemptCompletionEventProto> PARSER =
        new com.google.protobuf.AbstractParser<TaskAttemptCompletionEventProto>() {
      public TaskAttemptCompletionEventProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new TaskAttemptCompletionEventProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<TaskAttemptCompletionEventProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
    public static final int ATTEMPT_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto attemptId_;
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
     */
    public boolean hasAttemptId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getAttemptId() {
      return attemptId_;
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getAttemptIdOrBuilder() {
      return attemptId_;
    }

    // optional .hadoop.mapreduce.TaskAttemptCompletionEventStatusProto status = 2;
    public static final int STATUS_FIELD_NUMBER = 2;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventStatusProto status_;
    /**
     * optional .hadoop.mapreduce.TaskAttemptCompletionEventStatusProto status = 2;
     */
    public boolean hasStatus() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptCompletionEventStatusProto status = 2;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventStatusProto getStatus() {
      return status_;
    }

    // optional string map_output_server_address = 3;
    public static final int MAP_OUTPUT_SERVER_ADDRESS_FIELD_NUMBER = 3;
    private java.lang.Object mapOutputServerAddress_;
    /**
     * optional string map_output_server_address = 3;
     */
    public boolean hasMapOutputServerAddress() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * optional string map_output_server_address = 3;
     */
    public java.lang.String getMapOutputServerAddress() {
      java.lang.Object ref = mapOutputServerAddress_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          mapOutputServerAddress_ = s;
        }
        return s;
      }
    }
    /**
     * optional string map_output_server_address = 3;
     */
    public com.google.protobuf.ByteString
        getMapOutputServerAddressBytes() {
      java.lang.Object ref = mapOutputServerAddress_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        mapOutputServerAddress_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional int32 attempt_run_time = 4;
    public static final int ATTEMPT_RUN_TIME_FIELD_NUMBER = 4;
    private int attemptRunTime_;
    /**
     * optional int32 attempt_run_time = 4;
     */
    public boolean hasAttemptRunTime() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * optional int32 attempt_run_time = 4;
     */
    public int getAttemptRunTime() {
      return attemptRunTime_;
    }

    // optional int32 event_id = 5;
    public static final int EVENT_ID_FIELD_NUMBER = 5;
    private int eventId_;
    /**
     * optional int32 event_id = 5;
     */
    public boolean hasEventId() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * optional int32 event_id = 5;
     */
    public int getEventId() {
      return eventId_;
    }

    private void initFields() {
      attemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
      status_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventStatusProto.TACE_FAILED;
      mapOutputServerAddress_ = "";
      attemptRunTime_ = 0;
      eventId_ = 0;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, attemptId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeEnum(2, status_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, getMapOutputServerAddressBytes());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeInt32(4, attemptRunTime_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeInt32(5, eventId_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, attemptId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(2, status_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, getMapOutputServerAddressBytes());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(4, attemptRunTime_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(5, eventId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
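
    // getSerializedSize() memoizes its result in memoizedSerializedSize; writeTo() calls
    // it first so that the size, including nested-message sizes, is computed before the
    // fields are written.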

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto other = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto) obj;

      boolean result = true;
      result = result && (hasAttemptId() == other.hasAttemptId());
      if (hasAttemptId()) {
        result = result && getAttemptId()
            .equals(other.getAttemptId());
      }
      result = result && (hasStatus() == other.hasStatus());
      if (hasStatus()) {
        result = result &&
            (getStatus() == other.getStatus());
      }
      result = result && (hasMapOutputServerAddress() == other.hasMapOutputServerAddress());
      if (hasMapOutputServerAddress()) {
        result = result && getMapOutputServerAddress()
            .equals(other.getMapOutputServerAddress());
      }
      result = result && (hasAttemptRunTime() == other.hasAttemptRunTime());
      if (hasAttemptRunTime()) {
        result = result && (getAttemptRunTime()
            == other.getAttemptRunTime());
      }
      result = result && (hasEventId() == other.hasEventId());
      if (hasEventId()) {
        result = result && (getEventId()
            == other.getEventId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasAttemptId()) {
        hash = (37 * hash) + ATTEMPT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getAttemptId().hashCode();
      }
      if (hasStatus()) {
        hash = (37 * hash) + STATUS_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getStatus());
      }
      if (hasMapOutputServerAddress()) {
        hash = (37 * hash) + MAP_OUTPUT_SERVER_ADDRESS_FIELD_NUMBER;
        hash = (53 * hash) + getMapOutputServerAddress().hashCode();
      }
      if (hasAttemptRunTime()) {
        hash = (37 * hash) + ATTEMPT_RUN_TIME_FIELD_NUMBER;
        hash = (53 * hash) + getAttemptRunTime();
      }
      if (hasEventId()) {
        hash = (37 * hash) + EVENT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getEventId();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
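
    // Illustrative round trip using the parse methods above, assuming "event" is a
    // previously built TaskAttemptCompletionEventProto:
    //
    //   com.google.protobuf.ByteString bytes = event.toByteString();
    //   org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto copy =
    //       org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto
    //           .parseFrom(bytes);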

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.TaskAttemptCompletionEventProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskAttemptCompletionEventProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskAttemptCompletionEventProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getAttemptIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (attemptIdBuilder_ == null) {
          attemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
        } else {
          attemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        status_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventStatusProto.TACE_FAILED;
        bitField0_ = (bitField0_ & ~0x00000002);
        mapOutputServerAddress_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        attemptRunTime_ = 0;
        bitField0_ = (bitField0_ & ~0x00000008);
        eventId_ = 0;
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_TaskAttemptCompletionEventProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (attemptIdBuilder_ == null) {
          result.attemptId_ = attemptId_;
        } else {
          result.attemptId_ = attemptIdBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.status_ = status_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.mapOutputServerAddress_ = mapOutputServerAddress_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.attemptRunTime_ = attemptRunTime_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.eventId_ = eventId_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.getDefaultInstance()) return this;
        if (other.hasAttemptId()) {
          mergeAttemptId(other.getAttemptId());
        }
        if (other.hasStatus()) {
          setStatus(other.getStatus());
        }
        if (other.hasMapOutputServerAddress()) {
          bitField0_ |= 0x00000004;
          mapOutputServerAddress_ = other.mapOutputServerAddress_;
          onChanged();
        }
        if (other.hasAttemptRunTime()) {
          setAttemptRunTime(other.getAttemptRunTime());
        }
        if (other.hasEventId()) {
          setEventId(other.getEventId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto attemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> attemptIdBuilder_;
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
       */
      public boolean hasAttemptId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getAttemptId() {
        if (attemptIdBuilder_ == null) {
          return attemptId_;
        } else {
          return attemptIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
       */
      public Builder setAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
        if (attemptIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          attemptId_ = value;
          onChanged();
        } else {
          attemptIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
       */
      public Builder setAttemptId(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder builderForValue) {
        if (attemptIdBuilder_ == null) {
          attemptId_ = builderForValue.build();
          onChanged();
        } else {
          attemptIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
       */
      public Builder mergeAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
        if (attemptIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              attemptId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance()) {
            attemptId_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.newBuilder(attemptId_).mergeFrom(value).buildPartial();
          } else {
            attemptId_ = value;
          }
          onChanged();
        } else {
          attemptIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
       */
      public Builder clearAttemptId() {
        if (attemptIdBuilder_ == null) {
          attemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
          onChanged();
        } else {
          attemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder getAttemptIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getAttemptIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getAttemptIdOrBuilder() {
        if (attemptIdBuilder_ != null) {
          return attemptIdBuilder_.getMessageOrBuilder();
        } else {
          return attemptId_;
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> 
          getAttemptIdFieldBuilder() {
        if (attemptIdBuilder_ == null) {
          attemptIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder>(
                  attemptId_,
                  getParentForChildren(),
                  isClean());
          attemptId_ = null;
        }
        return attemptIdBuilder_;
      }

      // optional .hadoop.mapreduce.TaskAttemptCompletionEventStatusProto status = 2;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventStatusProto status_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventStatusProto.TACE_FAILED;
      /**
       * optional .hadoop.mapreduce.TaskAttemptCompletionEventStatusProto status = 2;
       */
      public boolean hasStatus() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptCompletionEventStatusProto status = 2;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventStatusProto getStatus() {
        return status_;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptCompletionEventStatusProto status = 2;
       */
      public Builder setStatus(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventStatusProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        status_ = value;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptCompletionEventStatusProto status = 2;
       */
      public Builder clearStatus() {
        bitField0_ = (bitField0_ & ~0x00000002);
        status_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventStatusProto.TACE_FAILED;
        onChanged();
        return this;
      }

      // optional string map_output_server_address = 3;
      private java.lang.Object mapOutputServerAddress_ = "";
      /**
       * optional string map_output_server_address = 3;
       */
      public boolean hasMapOutputServerAddress() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * optional string map_output_server_address = 3;
       */
      public java.lang.String getMapOutputServerAddress() {
        java.lang.Object ref = mapOutputServerAddress_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          mapOutputServerAddress_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string map_output_server_address = 3;
       */
      public com.google.protobuf.ByteString
          getMapOutputServerAddressBytes() {
        java.lang.Object ref = mapOutputServerAddress_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          mapOutputServerAddress_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string map_output_server_address = 3;
       */
      public Builder setMapOutputServerAddress(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        mapOutputServerAddress_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string map_output_server_address = 3;
       */
      public Builder clearMapOutputServerAddress() {
        bitField0_ = (bitField0_ & ~0x00000004);
        mapOutputServerAddress_ = getDefaultInstance().getMapOutputServerAddress();
        onChanged();
        return this;
      }
      /**
       * optional string map_output_server_address = 3;
       */
      public Builder setMapOutputServerAddressBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        mapOutputServerAddress_ = value;
        onChanged();
        return this;
      }

      // optional int32 attempt_run_time = 4;
      private int attemptRunTime_ ;
      /**
       * optional int32 attempt_run_time = 4;
       */
      public boolean hasAttemptRunTime() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * optional int32 attempt_run_time = 4;
       */
      public int getAttemptRunTime() {
        return attemptRunTime_;
      }
      /**
       * optional int32 attempt_run_time = 4;
       */
      public Builder setAttemptRunTime(int value) {
        bitField0_ |= 0x00000008;
        attemptRunTime_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 attempt_run_time = 4;
       */
      public Builder clearAttemptRunTime() {
        bitField0_ = (bitField0_ & ~0x00000008);
        attemptRunTime_ = 0;
        onChanged();
        return this;
      }

      // optional int32 event_id = 5;
      private int eventId_ ;
      /**
       * optional int32 event_id = 5;
       */
      public boolean hasEventId() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * optional int32 event_id = 5;
       */
      public int getEventId() {
        return eventId_;
      }
      /**
       * optional int32 event_id = 5;
       */
      public Builder setEventId(int value) {
        bitField0_ |= 0x00000010;
        eventId_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 event_id = 5;
       */
      public Builder clearEventId() {
        bitField0_ = (bitField0_ & ~0x00000010);
        eventId_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.TaskAttemptCompletionEventProto)
    }

    static {
      defaultInstance = new TaskAttemptCompletionEventProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.TaskAttemptCompletionEventProto)
  }
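
  // Illustrative sketch of building a TaskAttemptCompletionEventProto with the Builder
  // above; the address and numeric values are hypothetical, and TACE_FAILED is the status
  // value referenced in this section:
  //
  //   org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto event =
  //       org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.newBuilder()
  //           .setStatus(org.apache.hadoop.mapreduce.v2.proto.MRProtos
  //               .TaskAttemptCompletionEventStatusProto.TACE_FAILED)
  //           .setMapOutputServerAddress("host.example.com:13562")
  //           .setAttemptRunTime(1234)
  //           .setEventId(7)
  //           .build();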

  public interface StringCounterMapProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional string key = 1;
    /**
     * optional string key = 1;
     */
    boolean hasKey();
    /**
     * optional string key = 1;
     */
    java.lang.String getKey();
    /**
     * optional string key = 1;
     */
    com.google.protobuf.ByteString
        getKeyBytes();

    // optional .hadoop.mapreduce.CounterProto value = 2;
    /**
     * optional .hadoop.mapreduce.CounterProto value = 2;
     */
    boolean hasValue();
    /**
     * optional .hadoop.mapreduce.CounterProto value = 2;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto getValue();
    /**
     * optional .hadoop.mapreduce.CounterProto value = 2;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProtoOrBuilder getValueOrBuilder();
  }
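
  // StringCounterMapProto above is a simple key/value pair (a string key and a
  // CounterProto value); it presumably serves as a repeated map-entry message for
  // string-to-counter maps elsewhere in this file.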
  /**
   * Protobuf type {@code hadoop.mapreduce.StringCounterMapProto}
   */
  public static final class StringCounterMapProto extends
      com.google.protobuf.GeneratedMessage
      implements StringCounterMapProtoOrBuilder {
    // Use StringCounterMapProto.newBuilder() to construct.
    private StringCounterMapProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private StringCounterMapProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final StringCounterMapProto defaultInstance;
    public static StringCounterMapProto getDefaultInstance() {
      return defaultInstance;
    }

    public StringCounterMapProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private StringCounterMapProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              key_ = input.readBytes();
              break;
            }
            case 18: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = value_.toBuilder();
              }
              value_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(value_);
                value_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_StringCounterMapProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_StringCounterMapProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.Builder.class);
    }

    public static com.google.protobuf.Parser<StringCounterMapProto> PARSER =
        new com.google.protobuf.AbstractParser<StringCounterMapProto>() {
      public StringCounterMapProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new StringCounterMapProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<StringCounterMapProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional string key = 1;
    public static final int KEY_FIELD_NUMBER = 1;
    private java.lang.Object key_;
    /**
     * optional string key = 1;
     */
    public boolean hasKey() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional string key = 1;
     */
    public java.lang.String getKey() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          key_ = s;
        }
        return s;
      }
    }
    /**
     * optional string key = 1;
     */
    public com.google.protobuf.ByteString
        getKeyBytes() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        key_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional .hadoop.mapreduce.CounterProto value = 2;
    public static final int VALUE_FIELD_NUMBER = 2;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto value_;
    /**
     * optional .hadoop.mapreduce.CounterProto value = 2;
     */
    public boolean hasValue() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * optional .hadoop.mapreduce.CounterProto value = 2;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto getValue() {
      return value_;
    }
    /**
     * optional .hadoop.mapreduce.CounterProto value = 2;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProtoOrBuilder getValueOrBuilder() {
      return value_;
    }

    private void initFields() {
      key_ = "";
      value_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getKeyBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, value_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getKeyBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, value_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto other = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto) obj;

      boolean result = true;
      result = result && (hasKey() == other.hasKey());
      if (hasKey()) {
        result = result && getKey()
            .equals(other.getKey());
      }
      result = result && (hasValue() == other.hasValue());
      if (hasValue()) {
        result = result && getValue()
            .equals(other.getValue());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasKey()) {
        hash = (37 * hash) + KEY_FIELD_NUMBER;
        hash = (53 * hash) + getKey().hashCode();
      }
      if (hasValue()) {
        hash = (37 * hash) + VALUE_FIELD_NUMBER;
        hash = (53 * hash) + getValue().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.StringCounterMapProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_StringCounterMapProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_StringCounterMapProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getValueFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        key_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        if (valueBuilder_ == null) {
          value_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.getDefaultInstance();
        } else {
          valueBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_StringCounterMapProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.key_ = key_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (valueBuilder_ == null) {
          result.value_ = value_;
        } else {
          result.value_ = valueBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto.getDefaultInstance()) return this;
        if (other.hasKey()) {
          bitField0_ |= 0x00000001;
          key_ = other.key_;
          onChanged();
        }
        if (other.hasValue()) {
          mergeValue(other.getValue());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional string key = 1;
      private java.lang.Object key_ = "";
      /**
       * optional string key = 1;
       */
      public boolean hasKey() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional string key = 1;
       */
      public java.lang.String getKey() {
        java.lang.Object ref = key_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          key_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string key = 1;
       */
      public com.google.protobuf.ByteString
          getKeyBytes() {
        java.lang.Object ref = key_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          key_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string key = 1;
       */
      public Builder setKey(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        key_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string key = 1;
       */
      public Builder clearKey() {
        bitField0_ = (bitField0_ & ~0x00000001);
        key_ = getDefaultInstance().getKey();
        onChanged();
        return this;
      }
      /**
       * optional string key = 1;
       */
      public Builder setKeyBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        key_ = value;
        onChanged();
        return this;
      }

      // optional .hadoop.mapreduce.CounterProto value = 2;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto value_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProtoOrBuilder> valueBuilder_;
      /**
       * optional .hadoop.mapreduce.CounterProto value = 2;
       */
      public boolean hasValue() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * optional .hadoop.mapreduce.CounterProto value = 2;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto getValue() {
        if (valueBuilder_ == null) {
          return value_;
        } else {
          return valueBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.CounterProto value = 2;
       */
      public Builder setValue(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto value) {
        if (valueBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          value_ = value;
          onChanged();
        } else {
          valueBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CounterProto value = 2;
       */
      public Builder setValue(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.Builder builderForValue) {
        if (valueBuilder_ == null) {
          value_ = builderForValue.build();
          onChanged();
        } else {
          valueBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CounterProto value = 2;
       */
      public Builder mergeValue(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto value) {
        if (valueBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              value_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.getDefaultInstance()) {
            value_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.newBuilder(value_).mergeFrom(value).buildPartial();
          } else {
            value_ = value;
          }
          onChanged();
        } else {
          valueBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CounterProto value = 2;
       */
      public Builder clearValue() {
        if (valueBuilder_ == null) {
          value_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.getDefaultInstance();
          onChanged();
        } else {
          valueBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CounterProto value = 2;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.Builder getValueBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getValueFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.CounterProto value = 2;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProtoOrBuilder getValueOrBuilder() {
        if (valueBuilder_ != null) {
          return valueBuilder_.getMessageOrBuilder();
        } else {
          return value_;
        }
      }
      /**
       * optional .hadoop.mapreduce.CounterProto value = 2;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProtoOrBuilder> 
          getValueFieldBuilder() {
        if (valueBuilder_ == null) {
          valueBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProtoOrBuilder>(
                  value_,
                  getParentForChildren(),
                  isClean());
          value_ = null;
        }
        return valueBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.StringCounterMapProto)
    }

    static {
      defaultInstance = new StringCounterMapProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.StringCounterMapProto)
  }
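
  // Illustrative usage sketch (added for documentation; not emitted by protoc).
  // It relies only on the generated API shown above: mergeValue() merges
  // field-by-field into an existing CounterProto value instead of replacing it,
  // via CounterProto.newBuilder(value_).mergeFrom(...). The key and numbers are
  // made up.
  //
  //   StringCounterMapProto base = StringCounterMapProto.newBuilder()
  //       .setKey("FILE_BYTES_READ")
  //       .setValue(CounterProto.newBuilder()
  //           .setName("FILE_BYTES_READ").build())
  //       .build();
  //   StringCounterMapProto merged = base.toBuilder()
  //       .mergeValue(CounterProto.newBuilder().setValue(42L).build())
  //       .build();
  //   // merged.getValue() keeps the name "FILE_BYTES_READ" and gains value 42.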

  public interface StringCounterGroupMapProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional string key = 1;
    /**
     * optional string key = 1;
     */
    boolean hasKey();
    /**
     * optional string key = 1;
     */
    java.lang.String getKey();
    /**
     * optional string key = 1;
     */
    com.google.protobuf.ByteString
        getKeyBytes();

    // optional .hadoop.mapreduce.CounterGroupProto value = 2;
    /**
     * optional .hadoop.mapreduce.CounterGroupProto value = 2;
     */
    boolean hasValue();
    /**
     * optional .hadoop.mapreduce.CounterGroupProto value = 2;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto getValue();
    /**
     * optional .hadoop.mapreduce.CounterGroupProto value = 2;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProtoOrBuilder getValueOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.StringCounterGroupMapProto}
   */
  public static final class StringCounterGroupMapProto extends
      com.google.protobuf.GeneratedMessage
      implements StringCounterGroupMapProtoOrBuilder {
    // Use StringCounterGroupMapProto.newBuilder() to construct.
    private StringCounterGroupMapProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private StringCounterGroupMapProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final StringCounterGroupMapProto defaultInstance;
    public static StringCounterGroupMapProto getDefaultInstance() {
      return defaultInstance;
    }

    public StringCounterGroupMapProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private StringCounterGroupMapProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              key_ = input.readBytes();
              break;
            }
            case 18: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = value_.toBuilder();
              }
              value_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(value_);
                value_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_StringCounterGroupMapProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_StringCounterGroupMapProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.Builder.class);
    }

    public static com.google.protobuf.Parser<StringCounterGroupMapProto> PARSER =
        new com.google.protobuf.AbstractParser<StringCounterGroupMapProto>() {
      public StringCounterGroupMapProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new StringCounterGroupMapProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<StringCounterGroupMapProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional string key = 1;
    public static final int KEY_FIELD_NUMBER = 1;
    private java.lang.Object key_;
    /**
     * optional string key = 1;
     */
    public boolean hasKey() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional string key = 1;
     */
    public java.lang.String getKey() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          key_ = s;
        }
        return s;
      }
    }
    /**
     * optional string key = 1;
     */
    public com.google.protobuf.ByteString
        getKeyBytes() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        key_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional .hadoop.mapreduce.CounterGroupProto value = 2;
    public static final int VALUE_FIELD_NUMBER = 2;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto value_;
    /**
     * optional .hadoop.mapreduce.CounterGroupProto value = 2;
     */
    public boolean hasValue() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * optional .hadoop.mapreduce.CounterGroupProto value = 2;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto getValue() {
      return value_;
    }
    /**
     * optional .hadoop.mapreduce.CounterGroupProto value = 2;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProtoOrBuilder getValueOrBuilder() {
      return value_;
    }

    private void initFields() {
      key_ = "";
      value_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getKeyBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, value_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getKeyBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, value_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto other = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto) obj;

      boolean result = true;
      result = result && (hasKey() == other.hasKey());
      if (hasKey()) {
        result = result && getKey()
            .equals(other.getKey());
      }
      result = result && (hasValue() == other.hasValue());
      if (hasValue()) {
        result = result && getValue()
            .equals(other.getValue());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasKey()) {
        hash = (37 * hash) + KEY_FIELD_NUMBER;
        hash = (53 * hash) + getKey().hashCode();
      }
      if (hasValue()) {
        hash = (37 * hash) + VALUE_FIELD_NUMBER;
        hash = (53 * hash) + getValue().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.StringCounterGroupMapProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_StringCounterGroupMapProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_StringCounterGroupMapProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.class, org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getValueFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        key_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        if (valueBuilder_ == null) {
          value_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.getDefaultInstance();
        } else {
          valueBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.internal_static_hadoop_mapreduce_StringCounterGroupMapProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.key_ = key_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (valueBuilder_ == null) {
          result.value_ = value_;
        } else {
          result.value_ = valueBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto.getDefaultInstance()) return this;
        if (other.hasKey()) {
          bitField0_ |= 0x00000001;
          key_ = other.key_;
          onChanged();
        }
        if (other.hasValue()) {
          mergeValue(other.getValue());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterGroupMapProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional string key = 1;
      private java.lang.Object key_ = "";
      /**
       * optional string key = 1;
       */
      public boolean hasKey() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional string key = 1;
       */
      public java.lang.String getKey() {
        java.lang.Object ref = key_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          key_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string key = 1;
       */
      public com.google.protobuf.ByteString
          getKeyBytes() {
        java.lang.Object ref = key_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          key_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string key = 1;
       */
      public Builder setKey(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        key_ = value;
        onChanged();
        return this;
      }
      /**
       * optional string key = 1;
       */
      public Builder clearKey() {
        bitField0_ = (bitField0_ & ~0x00000001);
        key_ = getDefaultInstance().getKey();
        onChanged();
        return this;
      }
      /**
       * optional string key = 1;
       */
      public Builder setKeyBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        key_ = value;
        onChanged();
        return this;
      }

      // optional .hadoop.mapreduce.CounterGroupProto value = 2;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto value_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProtoOrBuilder> valueBuilder_;
      /**
       * optional .hadoop.mapreduce.CounterGroupProto value = 2;
       */
      public boolean hasValue() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * optional .hadoop.mapreduce.CounterGroupProto value = 2;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto getValue() {
        if (valueBuilder_ == null) {
          return value_;
        } else {
          return valueBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.CounterGroupProto value = 2;
       */
      public Builder setValue(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto value) {
        if (valueBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          value_ = value;
          onChanged();
        } else {
          valueBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CounterGroupProto value = 2;
       */
      public Builder setValue(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.Builder builderForValue) {
        if (valueBuilder_ == null) {
          value_ = builderForValue.build();
          onChanged();
        } else {
          valueBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CounterGroupProto value = 2;
       */
      public Builder mergeValue(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto value) {
        if (valueBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              value_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.getDefaultInstance()) {
            value_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.newBuilder(value_).mergeFrom(value).buildPartial();
          } else {
            value_ = value;
          }
          onChanged();
        } else {
          valueBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CounterGroupProto value = 2;
       */
      public Builder clearValue() {
        if (valueBuilder_ == null) {
          value_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.getDefaultInstance();
          onChanged();
        } else {
          valueBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CounterGroupProto value = 2;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.Builder getValueBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getValueFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.CounterGroupProto value = 2;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProtoOrBuilder getValueOrBuilder() {
        if (valueBuilder_ != null) {
          return valueBuilder_.getMessageOrBuilder();
        } else {
          return value_;
        }
      }
      /**
       * optional .hadoop.mapreduce.CounterGroupProto value = 2;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProtoOrBuilder> 
          getValueFieldBuilder() {
        if (valueBuilder_ == null) {
          valueBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProtoOrBuilder>(
                  value_,
                  getParentForChildren(),
                  isClean());
          value_ = null;
        }
        return valueBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.StringCounterGroupMapProto)
    }

    static {
      defaultInstance = new StringCounterGroupMapProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.StringCounterGroupMapProto)
  }

  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_JobIdProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_JobIdProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_TaskIdProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_TaskIdProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_TaskAttemptIdProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_TaskAttemptIdProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_CounterProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_CounterProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_CounterGroupProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_CounterGroupProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_CountersProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_CountersProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_TaskReportProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_TaskReportProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_TaskAttemptReportProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_TaskAttemptReportProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_JobReportProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_JobReportProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_AMInfoProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_AMInfoProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_TaskAttemptCompletionEventProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_TaskAttemptCompletionEventProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_StringCounterMapProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_StringCounterMapProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_StringCounterGroupMapProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_StringCounterGroupMapProto_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\017mr_protos.proto\022\020hadoop.mapreduce\032\021yar" +
      "n_protos.proto\"I\n\nJobIdProto\022/\n\006app_id\030\001" +
      " \001(\0132\037.hadoop.yarn.ApplicationIdProto\022\n\n" +
      "\002id\030\002 \001(\005\"{\n\013TaskIdProto\022,\n\006job_id\030\001 \001(\013" +
      "2\034.hadoop.mapreduce.JobIdProto\0222\n\ttask_t" +
      "ype\030\002 \001(\0162\037.hadoop.mapreduce.TaskTypePro" +
      "to\022\n\n\002id\030\003 \001(\005\"P\n\022TaskAttemptIdProto\022.\n\007" +
      "task_id\030\001 \001(\0132\035.hadoop.mapreduce.TaskIdP" +
      "roto\022\n\n\002id\030\002 \001(\005\"A\n\014CounterProto\022\014\n\004name" +
      "\030\001 \001(\t\022\024\n\014display_name\030\002 \001(\t\022\r\n\005value\030\003 ",
      "\001(\003\"r\n\021CounterGroupProto\022\014\n\004name\030\001 \001(\t\022\024" +
      "\n\014display_name\030\002 \001(\t\0229\n\010counters\030\003 \003(\0132\'" +
      ".hadoop.mapreduce.StringCounterMapProto\"" +
      "U\n\rCountersProto\022D\n\016counter_groups\030\001 \003(\013" +
      "2,.hadoop.mapreduce.StringCounterGroupMa" +
      "pProto\"\374\002\n\017TaskReportProto\022.\n\007task_id\030\001 " +
      "\001(\0132\035.hadoop.mapreduce.TaskIdProto\0224\n\nta" +
      "sk_state\030\002 \001(\0162 .hadoop.mapreduce.TaskSt" +
      "ateProto\022\020\n\010progress\030\003 \001(\002\022\022\n\nstart_time" +
      "\030\004 \001(\003\022\023\n\013finish_time\030\005 \001(\003\0221\n\010counters\030",
      "\006 \001(\0132\037.hadoop.mapreduce.CountersProto\022>" +
      "\n\020running_attempts\030\007 \003(\0132$.hadoop.mapred" +
      "uce.TaskAttemptIdProto\022@\n\022successful_att" +
      "empt\030\010 \001(\0132$.hadoop.mapreduce.TaskAttemp" +
      "tIdProto\022\023\n\013diagnostics\030\t \003(\t\"\250\004\n\026TaskAt" +
      "temptReportProto\022=\n\017task_attempt_id\030\001 \001(" +
      "\0132$.hadoop.mapreduce.TaskAttemptIdProto\022" +
      "C\n\022task_attempt_state\030\002 \001(\0162\'.hadoop.map" +
      "reduce.TaskAttemptStateProto\022\020\n\010progress" +
      "\030\003 \001(\002\022\022\n\nstart_time\030\004 \001(\003\022\023\n\013finish_tim",
      "e\030\005 \001(\003\0221\n\010counters\030\006 \001(\0132\037.hadoop.mapre" +
      "duce.CountersProto\022\027\n\017diagnostic_info\030\007 " +
      "\001(\t\022\024\n\014state_string\030\010 \001(\t\022+\n\005phase\030\t \001(\016" +
      "2\034.hadoop.mapreduce.PhaseProto\022\033\n\023shuffl" +
      "e_finish_time\030\n \001(\003\022\030\n\020sort_finish_time\030" +
      "\013 \001(\003\022\031\n\021node_manager_host\030\014 \001(\t\022\031\n\021node" +
      "_manager_port\030\r \001(\005\022\036\n\026node_manager_http" +
      "_port\030\016 \001(\005\0223\n\014container_id\030\017 \001(\0132\035.hado" +
      "op.yarn.ContainerIdProto\"\264\003\n\016JobReportPr" +
      "oto\022,\n\006job_id\030\001 \001(\0132\034.hadoop.mapreduce.J",
      "obIdProto\0222\n\tjob_state\030\002 \001(\0162\037.hadoop.ma" +
      "preduce.JobStateProto\022\024\n\014map_progress\030\003 " +
      "\001(\002\022\027\n\017reduce_progress\030\004 \001(\002\022\030\n\020cleanup_" +
      "progress\030\005 \001(\002\022\026\n\016setup_progress\030\006 \001(\002\022\022" +
      "\n\nstart_time\030\007 \001(\003\022\023\n\013finish_time\030\010 \001(\003\022" +
      "\014\n\004user\030\t \001(\t\022\017\n\007jobName\030\n \001(\t\022\023\n\013tracki" +
      "ngUrl\030\013 \001(\t\022\023\n\013diagnostics\030\014 \001(\t\022\017\n\007jobF" +
      "ile\030\r \001(\t\022/\n\010am_infos\030\016 \003(\0132\035.hadoop.map" +
      "reduce.AMInfoProto\022\023\n\013submit_time\030\017 \001(\003\022" +
      "\026\n\007is_uber\030\020 \001(\010:\005false\"\364\001\n\013AMInfoProto\022",
      "F\n\026application_attempt_id\030\001 \001(\0132&.hadoop" +
      ".yarn.ApplicationAttemptIdProto\022\022\n\nstart" +
      "_time\030\002 \001(\003\0223\n\014container_id\030\003 \001(\0132\035.hado" +
      "op.yarn.ContainerIdProto\022\031\n\021node_manager" +
      "_host\030\004 \001(\t\022\031\n\021node_manager_port\030\005 \001(\005\022\036" +
      "\n\026node_manager_http_port\030\006 \001(\005\"\363\001\n\037TaskA" +
      "ttemptCompletionEventProto\0228\n\nattempt_id" +
      "\030\001 \001(\0132$.hadoop.mapreduce.TaskAttemptIdP" +
      "roto\022G\n\006status\030\002 \001(\01627.hadoop.mapreduce." +
      "TaskAttemptCompletionEventStatusProto\022!\n",
      "\031map_output_server_address\030\003 \001(\t\022\030\n\020atte" +
      "mpt_run_time\030\004 \001(\005\022\020\n\010event_id\030\005 \001(\005\"S\n\025" +
      "StringCounterMapProto\022\013\n\003key\030\001 \001(\t\022-\n\005va" +
      "lue\030\002 \001(\0132\036.hadoop.mapreduce.CounterProt" +
      "o\"]\n\032StringCounterGroupMapProto\022\013\n\003key\030\001" +
      " \001(\t\0222\n\005value\030\002 \001(\0132#.hadoop.mapreduce.C" +
      "ounterGroupProto*$\n\rTaskTypeProto\022\007\n\003MAP" +
      "\020\001\022\n\n\006REDUCE\020\002*n\n\016TaskStateProto\022\n\n\006TS_N" +
      "EW\020\001\022\020\n\014TS_SCHEDULED\020\002\022\016\n\nTS_RUNNING\020\003\022\020" +
      "\n\014TS_SUCCEEDED\020\004\022\r\n\tTS_FAILED\020\005\022\r\n\tTS_KI",
      "LLED\020\006*_\n\nPhaseProto\022\016\n\nP_STARTING\020\001\022\t\n\005" +
      "P_MAP\020\002\022\r\n\tP_SHUFFLE\020\003\022\n\n\006P_SORT\020\004\022\014\n\010P_" +
      "REDUCE\020\005\022\r\n\tP_CLEANUP\020\006*\213\001\n\025TaskAttemptS" +
      "tateProto\022\n\n\006TA_NEW\020\001\022\017\n\013TA_STARTING\020\002\022\016" +
      "\n\nTA_RUNNING\020\003\022\025\n\021TA_COMMIT_PENDING\020\004\022\020\n" +
      "\014TA_SUCCEEDED\020\005\022\r\n\tTA_FAILED\020\006\022\r\n\tTA_KIL" +
      "LED\020\007*q\n\rJobStateProto\022\t\n\005J_NEW\020\001\022\014\n\010J_I" +
      "NITED\020\002\022\r\n\tJ_RUNNING\020\003\022\017\n\013J_SUCCEEDED\020\004\022" +
      "\014\n\010J_FAILED\020\005\022\014\n\010J_KILLED\020\006\022\013\n\007J_ERROR\020\007" +
      "*\204\001\n%TaskAttemptCompletionEventStatusPro",
      "to\022\017\n\013TACE_FAILED\020\001\022\017\n\013TACE_KILLED\020\002\022\022\n\016" +
      "TACE_SUCCEEDED\020\003\022\021\n\rTACE_OBSOLETE\020\004\022\022\n\016T" +
      "ACE_TIPFAILED\020\005B6\n$org.apache.hadoop.map" +
      "reduce.v2.protoB\010MRProtos\210\001\001\240\001\001"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_hadoop_mapreduce_JobIdProto_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_hadoop_mapreduce_JobIdProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_JobIdProto_descriptor,
              new java.lang.String[] { "AppId", "Id", });
          internal_static_hadoop_mapreduce_TaskIdProto_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_hadoop_mapreduce_TaskIdProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_TaskIdProto_descriptor,
              new java.lang.String[] { "JobId", "TaskType", "Id", });
          internal_static_hadoop_mapreduce_TaskAttemptIdProto_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_hadoop_mapreduce_TaskAttemptIdProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_TaskAttemptIdProto_descriptor,
              new java.lang.String[] { "TaskId", "Id", });
          internal_static_hadoop_mapreduce_CounterProto_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_hadoop_mapreduce_CounterProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_CounterProto_descriptor,
              new java.lang.String[] { "Name", "DisplayName", "Value", });
          internal_static_hadoop_mapreduce_CounterGroupProto_descriptor =
            getDescriptor().getMessageTypes().get(4);
          internal_static_hadoop_mapreduce_CounterGroupProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_CounterGroupProto_descriptor,
              new java.lang.String[] { "Name", "DisplayName", "Counters", });
          internal_static_hadoop_mapreduce_CountersProto_descriptor =
            getDescriptor().getMessageTypes().get(5);
          internal_static_hadoop_mapreduce_CountersProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_CountersProto_descriptor,
              new java.lang.String[] { "CounterGroups", });
          internal_static_hadoop_mapreduce_TaskReportProto_descriptor =
            getDescriptor().getMessageTypes().get(6);
          internal_static_hadoop_mapreduce_TaskReportProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_TaskReportProto_descriptor,
              new java.lang.String[] { "TaskId", "TaskState", "Progress", "StartTime", "FinishTime", "Counters", "RunningAttempts", "SuccessfulAttempt", "Diagnostics", });
          internal_static_hadoop_mapreduce_TaskAttemptReportProto_descriptor =
            getDescriptor().getMessageTypes().get(7);
          internal_static_hadoop_mapreduce_TaskAttemptReportProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_TaskAttemptReportProto_descriptor,
              new java.lang.String[] { "TaskAttemptId", "TaskAttemptState", "Progress", "StartTime", "FinishTime", "Counters", "DiagnosticInfo", "StateString", "Phase", "ShuffleFinishTime", "SortFinishTime", "NodeManagerHost", "NodeManagerPort", "NodeManagerHttpPort", "ContainerId", });
          internal_static_hadoop_mapreduce_JobReportProto_descriptor =
            getDescriptor().getMessageTypes().get(8);
          internal_static_hadoop_mapreduce_JobReportProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_JobReportProto_descriptor,
              new java.lang.String[] { "JobId", "JobState", "MapProgress", "ReduceProgress", "CleanupProgress", "SetupProgress", "StartTime", "FinishTime", "User", "JobName", "TrackingUrl", "Diagnostics", "JobFile", "AmInfos", "SubmitTime", "IsUber", });
          internal_static_hadoop_mapreduce_AMInfoProto_descriptor =
            getDescriptor().getMessageTypes().get(9);
          internal_static_hadoop_mapreduce_AMInfoProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_AMInfoProto_descriptor,
              new java.lang.String[] { "ApplicationAttemptId", "StartTime", "ContainerId", "NodeManagerHost", "NodeManagerPort", "NodeManagerHttpPort", });
          internal_static_hadoop_mapreduce_TaskAttemptCompletionEventProto_descriptor =
            getDescriptor().getMessageTypes().get(10);
          internal_static_hadoop_mapreduce_TaskAttemptCompletionEventProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_TaskAttemptCompletionEventProto_descriptor,
              new java.lang.String[] { "AttemptId", "Status", "MapOutputServerAddress", "AttemptRunTime", "EventId", });
          internal_static_hadoop_mapreduce_StringCounterMapProto_descriptor =
            getDescriptor().getMessageTypes().get(11);
          internal_static_hadoop_mapreduce_StringCounterMapProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_StringCounterMapProto_descriptor,
              new java.lang.String[] { "Key", "Value", });
          internal_static_hadoop_mapreduce_StringCounterGroupMapProto_descriptor =
            getDescriptor().getMessageTypes().get(12);
          internal_static_hadoop_mapreduce_StringCounterGroupMapProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_StringCounterGroupMapProto_descriptor,
              new java.lang.String[] { "Key", "Value", });
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor(),
        }, assigner);
  }

  // @@protoc_insertion_point(outer_class_scope)
}
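
Usage note (illustrative, not part of the generated source above): the sketch below shows how application code might exercise the builders and parsers defined in this file, using a hypothetical wrapper class name and made-up counter values. It uses only methods visible in this listing or inherited from the protobuf 2.5 runtime (for example, toByteArray() comes from AbstractMessageLite rather than from this file).

import com.google.protobuf.InvalidProtocolBufferException;

import org.apache.hadoop.mapreduce.v2.proto.MRProtos;
import org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterProto;
import org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto;

public class MRProtosUsageSketch {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    // Build a counter and wrap it in a map entry using the generated builders.
    CounterProto counter = CounterProto.newBuilder()
        .setName("FILE_BYTES_READ")          // illustrative counter name
        .setDisplayName("File bytes read")
        .setValue(1024L)
        .build();
    StringCounterMapProto entry = StringCounterMapProto.newBuilder()
        .setKey("FILE_BYTES_READ")
        .setValue(counter)
        .build();

    // Round-trip through the wire format; parseFrom delegates to the PARSER field.
    byte[] bytes = entry.toByteArray();
    StringCounterMapProto parsed = StringCounterMapProto.parseFrom(bytes);
    System.out.println(parsed.getKey() + " = " + parsed.getValue().getValue());

    // The file descriptor assembled in the static initializer can be inspected
    // at runtime, e.g. to list every message type declared in mr_protos.proto.
    for (com.google.protobuf.Descriptors.Descriptor d
        : MRProtos.getDescriptor().getMessageTypes()) {
      System.out.println(d.getFullName());
    }
  }
}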



