// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: mr_service_protos.proto

package org.apache.hadoop.mapreduce.v2.proto;

public final class MRServiceProtos {
  private MRServiceProtos() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
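  // The messages in this file wrap request/response payloads for the
  // MapReduce client protocol. A minimal round-trip sketch (the JobIdProto
  // value here is assumed to be assembled elsewhere by the caller):
  //
  //   MRProtos.JobIdProto jobId = ...;
  //   GetJobReportRequestProto request =
  //       GetJobReportRequestProto.newBuilder()
  //           .setJobId(jobId)
  //           .build();
  //   byte[] wire = request.toByteArray();
  //   GetJobReportRequestProto parsed = GetJobReportRequestProto.parseFrom(wire);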
  public interface GetJobReportRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.JobIdProto job_id = 1;
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    boolean hasJobId();
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId();
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.GetJobReportRequestProto}
   */
  public static final class GetJobReportRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements GetJobReportRequestProtoOrBuilder {
    // Use GetJobReportRequestProto.newBuilder() to construct.
    private GetJobReportRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GetJobReportRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetJobReportRequestProto defaultInstance;
    public static GetJobReportRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetJobReportRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GetJobReportRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
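              // Tag 10 = field number 1, wire type 2 (length-delimited):
              // the job_id submessage. If job_id was already parsed once,
              // the new payload is merged into it rather than replacing it.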
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = jobId_.toBuilder();
              }
              jobId_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(jobId_);
                jobId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GetJobReportRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<GetJobReportRequestProto>() {
      public GetJobReportRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetJobReportRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetJobReportRequestProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
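    // Bit 0 of bitField0_ records whether job_id was explicitly set
    // (proto2 "has" semantics for this optional field).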
    // optional .hadoop.mapreduce.JobIdProto job_id = 1;
    public static final int JOB_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_;
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public boolean hasJobId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
      return jobId_;
    }
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
      return jobId_;
    }

    private void initFields() {
      jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, jobId_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, jobId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto) obj;

      boolean result = true;
      result = result && (hasJobId() == other.hasJobId());
      if (hasJobId()) {
        result = result && getJobId()
            .equals(other.getJobId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasJobId()) {
        hash = (37 * hash) + JOB_ID_FIELD_NUMBER;
        hash = (53 * hash) + getJobId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
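    // toBuilder() copies this message into a fresh Builder; a sketch of
    // rebuilding a request with a different id (newId is a hypothetical
    // JobIdProto):
    //
    //   GetJobReportRequestProto updated =
    //       request.toBuilder().setJobId(newId).build();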

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.GetJobReportRequestProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getJobIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (jobIdBuilder_ == null) {
          jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
        } else {
          jobIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportRequestProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (jobIdBuilder_ == null) {
          result.jobId_ = jobId_;
        } else {
          result.jobId_ = jobIdBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto.getDefaultInstance()) return this;
        if (other.hasJobId()) {
          mergeJobId(other.getJobId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.JobIdProto job_id = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> jobIdBuilder_;
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public boolean hasJobId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
        if (jobIdBuilder_ == null) {
          return jobId_;
        } else {
          return jobIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder setJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
        if (jobIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          jobId_ = value;
          onChanged();
        } else {
          jobIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder setJobId(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder builderForValue) {
        if (jobIdBuilder_ == null) {
          jobId_ = builderForValue.build();
          onChanged();
        } else {
          jobIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder mergeJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
        if (jobIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              jobId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance()) {
            jobId_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.newBuilder(jobId_).mergeFrom(value).buildPartial();
          } else {
            jobId_ = value;
          }
          onChanged();
        } else {
          jobIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder clearJobId() {
        if (jobIdBuilder_ == null) {
          jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
          onChanged();
        } else {
          jobIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder getJobIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getJobIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
        if (jobIdBuilder_ != null) {
          return jobIdBuilder_.getMessageOrBuilder();
        } else {
          return jobId_;
        }
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> 
          getJobIdFieldBuilder() {
        if (jobIdBuilder_ == null) {
          jobIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>(
                  jobId_,
                  getParentForChildren(),
                  isClean());
          jobId_ = null;
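          // From this point the SingleFieldBuilder owns the field state;
          // the plain jobId_ reference above is no longer consulted.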
        }
        return jobIdBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetJobReportRequestProto)
    }

    static {
      defaultInstance = new GetJobReportRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetJobReportRequestProto)
  }

  public interface GetJobReportResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.JobReportProto job_report = 1;
    /**
     * optional .hadoop.mapreduce.JobReportProto job_report = 1;
     */
    boolean hasJobReport();
    /**
     * optional .hadoop.mapreduce.JobReportProto job_report = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto getJobReport();
    /**
     * optional .hadoop.mapreduce.JobReportProto job_report = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProtoOrBuilder getJobReportOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.GetJobReportResponseProto}
   */
  public static final class GetJobReportResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements GetJobReportResponseProtoOrBuilder {
    // Use GetJobReportResponseProto.newBuilder() to construct.
    private GetJobReportResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GetJobReportResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetJobReportResponseProto defaultInstance;
    public static GetJobReportResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetJobReportResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GetJobReportResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = jobReport_.toBuilder();
              }
              jobReport_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(jobReport_);
                jobReport_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GetJobReportResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<GetJobReportResponseProto>() {
      public GetJobReportResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetJobReportResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetJobReportResponseProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.JobReportProto job_report = 1;
    public static final int JOB_REPORT_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto jobReport_;
    /**
     * optional .hadoop.mapreduce.JobReportProto job_report = 1;
     */
    public boolean hasJobReport() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.JobReportProto job_report = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto getJobReport() {
      return jobReport_;
    }
    /**
     * optional .hadoop.mapreduce.JobReportProto job_report = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProtoOrBuilder getJobReportOrBuilder() {
      return jobReport_;
    }

    private void initFields() {
      jobReport_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, jobReport_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, jobReport_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto) obj;

      boolean result = true;
      result = result && (hasJobReport() == other.hasJobReport());
      if (hasJobReport()) {
        result = result && getJobReport()
            .equals(other.getJobReport());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasJobReport()) {
        hash = (37 * hash) + JOB_REPORT_FIELD_NUMBER;
        hash = (53 * hash) + getJobReport().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.GetJobReportResponseProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getJobReportFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (jobReportBuilder_ == null) {
          jobReport_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.getDefaultInstance();
        } else {
          jobReportBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetJobReportResponseProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (jobReportBuilder_ == null) {
          result.jobReport_ = jobReport_;
        } else {
          result.jobReport_ = jobReportBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.getDefaultInstance()) return this;
        if (other.hasJobReport()) {
          mergeJobReport(other.getJobReport());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.JobReportProto job_report = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto jobReport_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProtoOrBuilder> jobReportBuilder_;
      /**
       * optional .hadoop.mapreduce.JobReportProto job_report = 1;
       */
      public boolean hasJobReport() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.JobReportProto job_report = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto getJobReport() {
        if (jobReportBuilder_ == null) {
          return jobReport_;
        } else {
          return jobReportBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.JobReportProto job_report = 1;
       */
      public Builder setJobReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto value) {
        if (jobReportBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          jobReport_ = value;
          onChanged();
        } else {
          jobReportBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobReportProto job_report = 1;
       */
      public Builder setJobReport(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.Builder builderForValue) {
        if (jobReportBuilder_ == null) {
          jobReport_ = builderForValue.build();
          onChanged();
        } else {
          jobReportBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobReportProto job_report = 1;
       */
      public Builder mergeJobReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto value) {
        if (jobReportBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              jobReport_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.getDefaultInstance()) {
            jobReport_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.newBuilder(jobReport_).mergeFrom(value).buildPartial();
          } else {
            jobReport_ = value;
          }
          onChanged();
        } else {
          jobReportBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobReportProto job_report = 1;
       */
      public Builder clearJobReport() {
        if (jobReportBuilder_ == null) {
          jobReport_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.getDefaultInstance();
          onChanged();
        } else {
          jobReportBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobReportProto job_report = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.Builder getJobReportBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getJobReportFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.JobReportProto job_report = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProtoOrBuilder getJobReportOrBuilder() {
        if (jobReportBuilder_ != null) {
          return jobReportBuilder_.getMessageOrBuilder();
        } else {
          return jobReport_;
        }
      }
      /**
       * optional .hadoop.mapreduce.JobReportProto job_report = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProtoOrBuilder> 
          getJobReportFieldBuilder() {
        if (jobReportBuilder_ == null) {
          jobReportBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProtoOrBuilder>(
                  jobReport_,
                  getParentForChildren(),
                  isClean());
          jobReport_ = null;
        }
        return jobReportBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetJobReportResponseProto)
    }

    static {
      defaultInstance = new GetJobReportResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetJobReportResponseProto)
  }
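  // Reading the optional job_report on the receiving side; a sketch that
  // assumes responseBytes was produced by the serving RPC layer:
  //
  //   GetJobReportResponseProto response =
  //       GetJobReportResponseProto.parseFrom(responseBytes);
  //   if (response.hasJobReport()) {
  //     MRProtos.JobReportProto report = response.getJobReport();
  //     // ... inspect the report ...
  //   }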

  public interface GetTaskReportRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.TaskIdProto task_id = 1;
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    boolean hasTaskId();
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId();
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.GetTaskReportRequestProto}
   */
  public static final class GetTaskReportRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements GetTaskReportRequestProtoOrBuilder {
    // Use GetTaskReportRequestProto.newBuilder() to construct.
    private GetTaskReportRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GetTaskReportRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetTaskReportRequestProto defaultInstance;
    public static GetTaskReportRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetTaskReportRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GetTaskReportRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = taskId_.toBuilder();
              }
              taskId_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(taskId_);
                taskId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GetTaskReportRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<GetTaskReportRequestProto>() {
      public GetTaskReportRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetTaskReportRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetTaskReportRequestProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.TaskIdProto task_id = 1;
    public static final int TASK_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto taskId_;
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    public boolean hasTaskId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId() {
      return taskId_;
    }
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder() {
      return taskId_;
    }

    private void initFields() {
      taskId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, taskId_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, taskId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto) obj;

      boolean result = true;
      result = result && (hasTaskId() == other.hasTaskId());
      if (hasTaskId()) {
        result = result && getTaskId()
            .equals(other.getTaskId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTaskId()) {
        hash = (37 * hash) + TASK_ID_FIELD_NUMBER;
        hash = (53 * hash) + getTaskId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.GetTaskReportRequestProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTaskIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (taskIdBuilder_ == null) {
          taskId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance();
        } else {
          taskIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportRequestProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (taskIdBuilder_ == null) {
          result.taskId_ = taskId_;
        } else {
          result.taskId_ = taskIdBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto.getDefaultInstance()) return this;
        if (other.hasTaskId()) {
          mergeTaskId(other.getTaskId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
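      // mergeFrom(CodedInputStream, ...) above keeps builder state consistent
      // on failure: whatever was parsed before an
      // InvalidProtocolBufferException is still merged in via the finally
      // block, and the exception carries the unfinished message for callers
      // that want the partial data.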
      private int bitField0_;

      // optional .hadoop.mapreduce.TaskIdProto task_id = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto taskId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder> taskIdBuilder_;
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public boolean hasTaskId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId() {
        if (taskIdBuilder_ == null) {
          return taskId_;
        } else {
          return taskIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public Builder setTaskId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto value) {
        if (taskIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          taskId_ = value;
          onChanged();
        } else {
          taskIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public Builder setTaskId(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder builderForValue) {
        if (taskIdBuilder_ == null) {
          taskId_ = builderForValue.build();
          onChanged();
        } else {
          taskIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public Builder mergeTaskId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto value) {
        if (taskIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              taskId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance()) {
            taskId_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.newBuilder(taskId_).mergeFrom(value).buildPartial();
          } else {
            taskId_ = value;
          }
          onChanged();
        } else {
          taskIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public Builder clearTaskId() {
        if (taskIdBuilder_ == null) {
          taskId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance();
          onChanged();
        } else {
          taskIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder getTaskIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getTaskIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder() {
        if (taskIdBuilder_ != null) {
          return taskIdBuilder_.getMessageOrBuilder();
        } else {
          return taskId_;
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder> 
          getTaskIdFieldBuilder() {
        if (taskIdBuilder_ == null) {
          taskIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder>(
                  taskId_,
                  getParentForChildren(),
                  isClean());
          taskId_ = null;
        }
        return taskIdBuilder_;
      }
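      // getTaskIdFieldBuilder() lazily swaps the plain taskId_ message for a
      // SingleFieldBuilder so that nested-builder edits (via getTaskIdBuilder())
      // propagate change notifications to this parent builder; once created,
      // the field builder, not taskId_, is the single source of truth for the
      // task_id field.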

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetTaskReportRequestProto)
    }

    static {
      defaultInstance = new GetTaskReportRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetTaskReportRequestProto)
  }
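  // Reading note (inferred from the message names, not stated in this file):
  // GetTaskReportRequestProto is the request half of a getTaskReport call, and
  // the GetTaskReportResponseProto below carries the resulting TaskReportProto.
  // The RPC binding itself lives outside this file, in the MRClientProtocol
  // service definition.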

  public interface GetTaskReportResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.TaskReportProto task_report = 1;
    /**
     * optional .hadoop.mapreduce.TaskReportProto task_report = 1;
     */
    boolean hasTaskReport();
    /**
     * optional .hadoop.mapreduce.TaskReportProto task_report = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto getTaskReport();
    /**
     * optional .hadoop.mapreduce.TaskReportProto task_report = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder getTaskReportOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.GetTaskReportResponseProto}
   */
  public static final class GetTaskReportResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements GetTaskReportResponseProtoOrBuilder {
    // Use GetTaskReportResponseProto.newBuilder() to construct.
    private GetTaskReportResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GetTaskReportResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetTaskReportResponseProto defaultInstance;
    public static GetTaskReportResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetTaskReportResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GetTaskReportResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = taskReport_.toBuilder();
              }
              taskReport_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(taskReport_);
                taskReport_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
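    // In the wire-format loop above, tag 0 marks end of input; tag 10 is
    // field 1 (task_report) with wire type 2, i.e. (1 << 3) | 2 == 10 for a
    // length-delimited field; and any other tag is preserved through
    // parseUnknownField for forward compatibility.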
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GetTaskReportResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<GetTaskReportResponseProto>() {
      public GetTaskReportResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetTaskReportResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetTaskReportResponseProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.TaskReportProto task_report = 1;
    public static final int TASK_REPORT_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto taskReport_;
    /**
     * optional .hadoop.mapreduce.TaskReportProto task_report = 1;
     */
    public boolean hasTaskReport() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.TaskReportProto task_report = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto getTaskReport() {
      return taskReport_;
    }
    /**
     * optional .hadoop.mapreduce.TaskReportProto task_report = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder getTaskReportOrBuilder() {
      return taskReport_;
    }

    private void initFields() {
      taskReport_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, taskReport_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, taskReport_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
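    // getSerializedSize() memoizes its result (-1 means "not yet computed"),
    // and writeTo() calls it first so cached sizes are in place before any
    // nested message is written. Illustrative check:
    //
    //   int wireBytes = response.getSerializedSize(); // 0 for an empty message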

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto) obj;

      boolean result = true;
      result = result && (hasTaskReport() == other.hasTaskReport());
      if (hasTaskReport()) {
        result = result && getTaskReport()
            .equals(other.getTaskReport());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTaskReport()) {
        hash = (37 * hash) + TASK_REPORT_FIELD_NUMBER;
        hash = (53 * hash) + getTaskReport().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
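    // hashCode() mixes the descriptor hash with each present field using the
    // generated multiplier scheme and memoizes the result; 0 is reserved to
    // mean "not yet computed", so a message whose hash happens to be 0 is
    // simply recomputed on each call.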

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.GetTaskReportResponseProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTaskReportFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (taskReportBuilder_ == null) {
          taskReport_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance();
        } else {
          taskReportBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportResponseProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (taskReportBuilder_ == null) {
          result.taskReport_ = taskReport_;
        } else {
          result.taskReport_ = taskReportBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.getDefaultInstance()) return this;
        if (other.hasTaskReport()) {
          mergeTaskReport(other.getTaskReport());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.TaskReportProto task_report = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto taskReport_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder> taskReportBuilder_;
      /**
       * optional .hadoop.mapreduce.TaskReportProto task_report = 1;
       */
      public boolean hasTaskReport() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.TaskReportProto task_report = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto getTaskReport() {
        if (taskReportBuilder_ == null) {
          return taskReport_;
        } else {
          return taskReportBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskReportProto task_report = 1;
       */
      public Builder setTaskReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto value) {
        if (taskReportBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          taskReport_ = value;
          onChanged();
        } else {
          taskReportBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskReportProto task_report = 1;
       */
      public Builder setTaskReport(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder builderForValue) {
        if (taskReportBuilder_ == null) {
          taskReport_ = builderForValue.build();
          onChanged();
        } else {
          taskReportBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskReportProto task_report = 1;
       */
      public Builder mergeTaskReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto value) {
        if (taskReportBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              taskReport_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance()) {
            taskReport_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.newBuilder(taskReport_).mergeFrom(value).buildPartial();
          } else {
            taskReport_ = value;
          }
          onChanged();
        } else {
          taskReportBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskReportProto task_report = 1;
       */
      public Builder clearTaskReport() {
        if (taskReportBuilder_ == null) {
          taskReport_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance();
          onChanged();
        } else {
          taskReportBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskReportProto task_report = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder getTaskReportBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getTaskReportFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.TaskReportProto task_report = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder getTaskReportOrBuilder() {
        if (taskReportBuilder_ != null) {
          return taskReportBuilder_.getMessageOrBuilder();
        } else {
          return taskReport_;
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskReportProto task_report = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder> 
          getTaskReportFieldBuilder() {
        if (taskReportBuilder_ == null) {
          taskReportBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder>(
                  taskReport_,
                  getParentForChildren(),
                  isClean());
          taskReport_ = null;
        }
        return taskReportBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetTaskReportResponseProto)
    }

    static {
      defaultInstance = new GetTaskReportResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetTaskReportResponseProto)
  }
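  // Hedged consumption sketch for the response type above ("response" is an
  // illustrative local, e.g. obtained from an MRClientProtocol call):
  //
  //   if (response.hasTaskReport()) {
  //     org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto report =
  //         response.getTaskReport();
  //   }
  //
  // hasTaskReport() should be checked first: the field is optional, and the
  // getter returns the default instance rather than null when it is unset.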

  public interface GetTaskAttemptReportRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    boolean hasTaskAttemptId();
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId();
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.GetTaskAttemptReportRequestProto}
   */
  public static final class GetTaskAttemptReportRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements GetTaskAttemptReportRequestProtoOrBuilder {
    // Use GetTaskAttemptReportRequestProto.newBuilder() to construct.
    private GetTaskAttemptReportRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GetTaskAttemptReportRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetTaskAttemptReportRequestProto defaultInstance;
    public static GetTaskAttemptReportRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetTaskAttemptReportRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GetTaskAttemptReportRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = taskAttemptId_.toBuilder();
              }
              taskAttemptId_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(taskAttemptId_);
                taskAttemptId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GetTaskAttemptReportRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<GetTaskAttemptReportRequestProto>() {
      public GetTaskAttemptReportRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetTaskAttemptReportRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetTaskAttemptReportRequestProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
    public static final int TASK_ATTEMPT_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_;
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    public boolean hasTaskAttemptId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
      return taskAttemptId_;
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
      return taskAttemptId_;
    }

    private void initFields() {
      taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, taskAttemptId_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, taskAttemptId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto) obj;

      boolean result = true;
      result = result && (hasTaskAttemptId() == other.hasTaskAttemptId());
      if (hasTaskAttemptId()) {
        result = result && getTaskAttemptId()
            .equals(other.getTaskAttemptId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTaskAttemptId()) {
        hash = (37 * hash) + TASK_ATTEMPT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getTaskAttemptId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
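    // parseDelimitedFrom differs from parseFrom(InputStream) above: it first
    // reads a varint length prefix and consumes exactly that many bytes, which
    // lets several messages share one stream. Illustrative pairing:
    //
    //   request.writeDelimitedTo(out);
    //   GetTaskAttemptReportRequestProto back =
    //       GetTaskAttemptReportRequestProto.parseDelimitedFrom(in);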

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.GetTaskAttemptReportRequestProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTaskAttemptIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
        } else {
          taskAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (taskAttemptIdBuilder_ == null) {
          result.taskAttemptId_ = taskAttemptId_;
        } else {
          result.taskAttemptId_ = taskAttemptIdBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto.getDefaultInstance()) return this;
        if (other.hasTaskAttemptId()) {
          mergeTaskAttemptId(other.getTaskAttemptId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> taskAttemptIdBuilder_;
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public boolean hasTaskAttemptId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
        if (taskAttemptIdBuilder_ == null) {
          return taskAttemptId_;
        } else {
          return taskAttemptIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder setTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
        if (taskAttemptIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          taskAttemptId_ = value;
          onChanged();
        } else {
          taskAttemptIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder setTaskAttemptId(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder builderForValue) {
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptId_ = builderForValue.build();
          onChanged();
        } else {
          taskAttemptIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder mergeTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
        if (taskAttemptIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              taskAttemptId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance()) {
            taskAttemptId_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.newBuilder(taskAttemptId_).mergeFrom(value).buildPartial();
          } else {
            taskAttemptId_ = value;
          }
          onChanged();
        } else {
          taskAttemptIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder clearTaskAttemptId() {
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
          onChanged();
        } else {
          taskAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder getTaskAttemptIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getTaskAttemptIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
        if (taskAttemptIdBuilder_ != null) {
          return taskAttemptIdBuilder_.getMessageOrBuilder();
        } else {
          return taskAttemptId_;
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> 
          getTaskAttemptIdFieldBuilder() {
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder>(
                  taskAttemptId_,
                  getParentForChildren(),
                  isClean());
          taskAttemptId_ = null;
        }
        return taskAttemptIdBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetTaskAttemptReportRequestProto)
    }

    static {
      defaultInstance = new GetTaskAttemptReportRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetTaskAttemptReportRequestProto)
  }
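  // Building this request follows the same pattern as the task-level request
  // earlier in the file; a sketch with an illustrative attempt id:
  //
  //   GetTaskAttemptReportRequestProto req = GetTaskAttemptReportRequestProto.newBuilder()
  //       .setTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance())
  //       .build();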

  public interface GetTaskAttemptReportResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
    /**
     * optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
     */
    boolean hasTaskAttemptReport();
    /**
     * optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto getTaskAttemptReport();
    /**
     * optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProtoOrBuilder getTaskAttemptReportOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.GetTaskAttemptReportResponseProto}
   */
  public static final class GetTaskAttemptReportResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements GetTaskAttemptReportResponseProtoOrBuilder {
    // Use GetTaskAttemptReportResponseProto.newBuilder() to construct.
    private GetTaskAttemptReportResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GetTaskAttemptReportResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetTaskAttemptReportResponseProto defaultInstance;
    public static GetTaskAttemptReportResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetTaskAttemptReportResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GetTaskAttemptReportResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = taskAttemptReport_.toBuilder();
              }
              taskAttemptReport_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(taskAttemptReport_);
                taskAttemptReport_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GetTaskAttemptReportResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<GetTaskAttemptReportResponseProto>() {
      public GetTaskAttemptReportResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetTaskAttemptReportResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetTaskAttemptReportResponseProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
    public static final int TASK_ATTEMPT_REPORT_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto taskAttemptReport_;
    /**
     * optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
     */
    public boolean hasTaskAttemptReport() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto getTaskAttemptReport() {
      return taskAttemptReport_;
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProtoOrBuilder getTaskAttemptReportOrBuilder() {
      return taskAttemptReport_;
    }

    private void initFields() {
      taskAttemptReport_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, taskAttemptReport_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, taskAttemptReport_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
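
    // Editorial note (not emitted by protoc): getSerializedSize() memoizes
    // the computed size in memoizedSerializedSize, which is why writeTo()
    // above calls it first; the cached size lets field 1 be streamed with a
    // correct length prefix without recomputing.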

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto) obj;

      boolean result = true;
      result = result && (hasTaskAttemptReport() == other.hasTaskAttemptReport());
      if (hasTaskAttemptReport()) {
        result = result && getTaskAttemptReport()
            .equals(other.getTaskAttemptReport());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTaskAttemptReport()) {
        hash = (37 * hash) + TASK_ATTEMPT_REPORT_FIELD_NUMBER;
        hash = (53 * hash) + getTaskAttemptReport().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.GetTaskAttemptReportResponseProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTaskAttemptReportFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (taskAttemptReportBuilder_ == null) {
          taskAttemptReport_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance();
        } else {
          taskAttemptReportBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (taskAttemptReportBuilder_ == null) {
          result.taskAttemptReport_ = taskAttemptReport_;
        } else {
          result.taskAttemptReport_ = taskAttemptReportBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.getDefaultInstance()) return this;
        if (other.hasTaskAttemptReport()) {
          mergeTaskAttemptReport(other.getTaskAttemptReport());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
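
      // Editorial note (not emitted by protoc): for the singular message
      // field task_attempt_report, mergeFrom() above follows protobuf merge
      // semantics: when both messages have the field set, the sub-messages
      // are merged field-by-field via mergeTaskAttemptReport(), rather than
      // the other value wholesale replacing this builder's value.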

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto taskAttemptReport_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProtoOrBuilder> taskAttemptReportBuilder_;
      /**
       * optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
       */
      public boolean hasTaskAttemptReport() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto getTaskAttemptReport() {
        if (taskAttemptReportBuilder_ == null) {
          return taskAttemptReport_;
        } else {
          return taskAttemptReportBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
       */
      public Builder setTaskAttemptReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto value) {
        if (taskAttemptReportBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          taskAttemptReport_ = value;
          onChanged();
        } else {
          taskAttemptReportBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
       */
      public Builder setTaskAttemptReport(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder builderForValue) {
        if (taskAttemptReportBuilder_ == null) {
          taskAttemptReport_ = builderForValue.build();
          onChanged();
        } else {
          taskAttemptReportBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
       */
      public Builder mergeTaskAttemptReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto value) {
        if (taskAttemptReportBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              taskAttemptReport_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance()) {
            taskAttemptReport_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.newBuilder(taskAttemptReport_).mergeFrom(value).buildPartial();
          } else {
            taskAttemptReport_ = value;
          }
          onChanged();
        } else {
          taskAttemptReportBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
       */
      public Builder clearTaskAttemptReport() {
        if (taskAttemptReportBuilder_ == null) {
          taskAttemptReport_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance();
          onChanged();
        } else {
          taskAttemptReportBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder getTaskAttemptReportBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getTaskAttemptReportFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProtoOrBuilder getTaskAttemptReportOrBuilder() {
        if (taskAttemptReportBuilder_ != null) {
          return taskAttemptReportBuilder_.getMessageOrBuilder();
        } else {
          return taskAttemptReport_;
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProtoOrBuilder> 
          getTaskAttemptReportFieldBuilder() {
        if (taskAttemptReportBuilder_ == null) {
          taskAttemptReportBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProtoOrBuilder>(
                  taskAttemptReport_,
                  getParentForChildren(),
                  isClean());
          taskAttemptReport_ = null;
        }
        return taskAttemptReportBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetTaskAttemptReportResponseProto)
    }

    static {
      defaultInstance = new GetTaskAttemptReportResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetTaskAttemptReportResponseProto)
  }
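
  // Editorial sketch (not part of the generated file): parsing a serialized
  // GetTaskAttemptReportResponseProto and honoring its optional-field
  // contract. getTaskAttemptReport() never returns null; when the field is
  // absent it returns the default instance, so hasTaskAttemptReport() is the
  // reliable presence check. The method name and wireBytes are placeholders.
  private static void exampleReadResponse(byte[] wireBytes)
      throws com.google.protobuf.InvalidProtocolBufferException {
    GetTaskAttemptReportResponseProto response =
        GetTaskAttemptReportResponseProto.parseFrom(wireBytes);
    if (response.hasTaskAttemptReport()) {
      // Safe: the presence bit (bitField0_ & 0x1) was set while parsing field 1.
      System.out.println(
          "report bytes: "
              + response.getTaskAttemptReport().getSerializedSize());
    }
  }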

  public interface GetCountersRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.JobIdProto job_id = 1;
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    boolean hasJobId();
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId();
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.GetCountersRequestProto}
   */
  public static final class GetCountersRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements GetCountersRequestProtoOrBuilder {
    // Use GetCountersRequestProto.newBuilder() to construct.
    private GetCountersRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GetCountersRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetCountersRequestProto defaultInstance;
    public static GetCountersRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetCountersRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GetCountersRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = jobId_.toBuilder();
              }
              jobId_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(jobId_);
                jobId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GetCountersRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<GetCountersRequestProto>() {
      public GetCountersRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetCountersRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetCountersRequestProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.JobIdProto job_id = 1;
    public static final int JOB_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_;
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public boolean hasJobId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
      return jobId_;
    }
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
      return jobId_;
    }

    private void initFields() {
      jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, jobId_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, jobId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto) obj;

      boolean result = true;
      result = result && (hasJobId() == other.hasJobId());
      if (hasJobId()) {
        result = result && getJobId()
            .equals(other.getJobId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasJobId()) {
        hash = (37 * hash) + JOB_ID_FIELD_NUMBER;
        hash = (53 * hash) + getJobId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.GetCountersRequestProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getJobIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (jobIdBuilder_ == null) {
          jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
        } else {
          jobIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersRequestProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (jobIdBuilder_ == null) {
          result.jobId_ = jobId_;
        } else {
          result.jobId_ = jobIdBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto.getDefaultInstance()) return this;
        if (other.hasJobId()) {
          mergeJobId(other.getJobId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.JobIdProto job_id = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> jobIdBuilder_;
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public boolean hasJobId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
        if (jobIdBuilder_ == null) {
          return jobId_;
        } else {
          return jobIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder setJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
        if (jobIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          jobId_ = value;
          onChanged();
        } else {
          jobIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder setJobId(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder builderForValue) {
        if (jobIdBuilder_ == null) {
          jobId_ = builderForValue.build();
          onChanged();
        } else {
          jobIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder mergeJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
        if (jobIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              jobId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance()) {
            jobId_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.newBuilder(jobId_).mergeFrom(value).buildPartial();
          } else {
            jobId_ = value;
          }
          onChanged();
        } else {
          jobIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder clearJobId() {
        if (jobIdBuilder_ == null) {
          jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
          onChanged();
        } else {
          jobIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder getJobIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getJobIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
        if (jobIdBuilder_ != null) {
          return jobIdBuilder_.getMessageOrBuilder();
        } else {
          return jobId_;
        }
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> 
          getJobIdFieldBuilder() {
        if (jobIdBuilder_ == null) {
          jobIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>(
                  jobId_,
                  getParentForChildren(),
                  isClean());
          jobId_ = null;
        }
        return jobIdBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetCountersRequestProto)
    }

    static {
      defaultInstance = new GetCountersRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetCountersRequestProto)
  }
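
  // Editorial sketch (not part of the generated file): a length-delimited
  // round trip for GetCountersRequestProto, useful when several messages
  // share one stream. The in-memory streams, the jobId argument, and the
  // method name are placeholders.
  private static GetCountersRequestProto exampleDelimitedRoundTrip(
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId)
      throws java.io.IOException {
    java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
    GetCountersRequestProto.newBuilder()
        .setJobId(jobId)
        .build()
        .writeDelimitedTo(out);  // prefixes the payload with its varint size
    return GetCountersRequestProto.parseDelimitedFrom(
        new java.io.ByteArrayInputStream(out.toByteArray()));
  }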

  public interface GetCountersResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.CountersProto counters = 1;
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 1;
     */
    boolean hasCounters();
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto getCounters();
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder getCountersOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.GetCountersResponseProto}
   */
  public static final class GetCountersResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements GetCountersResponseProtoOrBuilder {
    // Use GetCountersResponseProto.newBuilder() to construct.
    private GetCountersResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GetCountersResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetCountersResponseProto defaultInstance;
    public static GetCountersResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetCountersResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GetCountersResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = counters_.toBuilder();
              }
              counters_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(counters_);
                counters_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GetCountersResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<GetCountersResponseProto>() {
      public GetCountersResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetCountersResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetCountersResponseProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.CountersProto counters = 1;
    public static final int COUNTERS_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto counters_;
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 1;
     */
    public boolean hasCounters() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto getCounters() {
      return counters_;
    }
    /**
     * optional .hadoop.mapreduce.CountersProto counters = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder getCountersOrBuilder() {
      return counters_;
    }

    private void initFields() {
      counters_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, counters_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, counters_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto) obj;

      boolean result = true;
      result = result && (hasCounters() == other.hasCounters());
      if (hasCounters()) {
        result = result && getCounters()
            .equals(other.getCounters());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasCounters()) {
        hash = (37 * hash) + COUNTERS_FIELD_NUMBER;
        hash = (53 * hash) + getCounters().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.GetCountersResponseProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getCountersFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (countersBuilder_ == null) {
          counters_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance();
        } else {
          countersBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetCountersResponseProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (countersBuilder_ == null) {
          result.counters_ = counters_;
        } else {
          result.counters_ = countersBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.getDefaultInstance()) return this;
        if (other.hasCounters()) {
          mergeCounters(other.getCounters());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.CountersProto counters = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto counters_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder> countersBuilder_;
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 1;
       */
      public boolean hasCounters() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto getCounters() {
        if (countersBuilder_ == null) {
          return counters_;
        } else {
          return countersBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 1;
       */
      public Builder setCounters(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto value) {
        if (countersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          counters_ = value;
          onChanged();
        } else {
          countersBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 1;
       */
      public Builder setCounters(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder builderForValue) {
        if (countersBuilder_ == null) {
          counters_ = builderForValue.build();
          onChanged();
        } else {
          countersBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 1;
       */
      public Builder mergeCounters(org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto value) {
        if (countersBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              counters_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance()) {
            counters_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.newBuilder(counters_).mergeFrom(value).buildPartial();
          } else {
            counters_ = value;
          }
          onChanged();
        } else {
          countersBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 1;
       */
      public Builder clearCounters() {
        if (countersBuilder_ == null) {
          counters_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.getDefaultInstance();
          onChanged();
        } else {
          countersBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder getCountersBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getCountersFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder getCountersOrBuilder() {
        if (countersBuilder_ != null) {
          return countersBuilder_.getMessageOrBuilder();
        } else {
          return counters_;
        }
      }
      /**
       * optional .hadoop.mapreduce.CountersProto counters = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder> 
          getCountersFieldBuilder() {
        if (countersBuilder_ == null) {
          countersBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.CountersProtoOrBuilder>(
                  counters_,
                  getParentForChildren(),
                  isClean());
          counters_ = null;
        }
        return countersBuilder_;
      }
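
      // Editor's note: getCountersFieldBuilder() above follows the standard
      // protobuf 2.5 lazy-initialization pattern. The builder holds a plain
      // counters_ message until a nested builder is first requested, then
      // transfers ownership to a SingleFieldBuilder and nulls counters_.
      // A minimal sketch of how this surfaces to callers (names are the
      // generated ones; the empty counters value is a placeholder):
      //
      //   GetCountersResponseProto.Builder b = GetCountersResponseProto.newBuilder();
      //   MRProtos.CountersProto.Builder nested = b.getCountersBuilder(); // creates the field builder
      //   GetCountersResponseProto msg = b.build(); // counters_ is built via the field builder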

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetCountersResponseProto)
    }

    static {
      defaultInstance = new GetCountersResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetCountersResponseProto)
  }
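
  // Editor's note: a minimal round trip through the static parse helpers
  // above, assuming imports of MRServiceProtos and MRProtos. The counters
  // payload here is a hypothetical placeholder; a real response carries the
  // counters reported by the MR application master.
  //
  //   MRServiceProtos.GetCountersResponseProto response =
  //       MRServiceProtos.GetCountersResponseProto.newBuilder()
  //           .setCounters(MRProtos.CountersProto.getDefaultInstance())
  //           .build();
  //   byte[] wire = response.toByteArray();
  //   MRServiceProtos.GetCountersResponseProto parsed =
  //       MRServiceProtos.GetCountersResponseProto.parseFrom(wire);
  //   assert parsed.hasCounters();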

  public interface GetTaskAttemptCompletionEventsRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.JobIdProto job_id = 1;
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    boolean hasJobId();
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId();
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder();

    // optional int32 from_event_id = 2;
    /**
     * optional int32 from_event_id = 2;
     */
    boolean hasFromEventId();
    /**
     * optional int32 from_event_id = 2;
     */
    int getFromEventId();

    // optional int32 max_events = 3;
    /**
     * optional int32 max_events = 3;
     */
    boolean hasMaxEvents();
    /**
     * optional int32 max_events = 3;
     */
    int getMaxEvents();
  }
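
  // Editor's note: the accessor comments above mirror the message definition
  // in mr_service_protos.proto. Reconstructed from those comments, the source
  // message is (sketch; package and import statements omitted):
  //
  //   message GetTaskAttemptCompletionEventsRequestProto {
  //     optional JobIdProto job_id = 1;
  //     optional int32 from_event_id = 2;
  //     optional int32 max_events = 3;
  //   }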
  /**
   * Protobuf type {@code hadoop.mapreduce.GetTaskAttemptCompletionEventsRequestProto}
   */
  public static final class GetTaskAttemptCompletionEventsRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements GetTaskAttemptCompletionEventsRequestProtoOrBuilder {
    // Use GetTaskAttemptCompletionEventsRequestProto.newBuilder() to construct.
    private GetTaskAttemptCompletionEventsRequestProto(com.google.protobuf.GeneratedMessage.Builder builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GetTaskAttemptCompletionEventsRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetTaskAttemptCompletionEventsRequestProto defaultInstance;
    public static GetTaskAttemptCompletionEventsRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetTaskAttemptCompletionEventsRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GetTaskAttemptCompletionEventsRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = jobId_.toBuilder();
              }
              jobId_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(jobId_);
                jobId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              fromEventId_ = input.readInt32();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              maxEvents_ = input.readInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GetTaskAttemptCompletionEventsRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<GetTaskAttemptCompletionEventsRequestProto>() {
      public GetTaskAttemptCompletionEventsRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetTaskAttemptCompletionEventsRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetTaskAttemptCompletionEventsRequestProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.JobIdProto job_id = 1;
    public static final int JOB_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_;
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public boolean hasJobId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
      return jobId_;
    }
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
      return jobId_;
    }

    // optional int32 from_event_id = 2;
    public static final int FROM_EVENT_ID_FIELD_NUMBER = 2;
    private int fromEventId_;
    /**
     * optional int32 from_event_id = 2;
     */
    public boolean hasFromEventId() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * optional int32 from_event_id = 2;
     */
    public int getFromEventId() {
      return fromEventId_;
    }

    // optional int32 max_events = 3;
    public static final int MAX_EVENTS_FIELD_NUMBER = 3;
    private int maxEvents_;
    /**
     * optional int32 max_events = 3;
     */
    public boolean hasMaxEvents() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * optional int32 max_events = 3;
     */
    public int getMaxEvents() {
      return maxEvents_;
    }

    private void initFields() {
      jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
      fromEventId_ = 0;
      maxEvents_ = 0;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, jobId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeInt32(2, fromEventId_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeInt32(3, maxEvents_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, jobId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(2, fromEventId_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(3, maxEvents_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
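
    // Editor's note: getSerializedSize() memoizes its result in
    // memoizedSerializedSize. This is safe because the message is immutable
    // once constructed, and writeTo() calls it first so the cached size
    // matches exactly what is written to the output stream.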

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto) obj;

      boolean result = true;
      result = result && (hasJobId() == other.hasJobId());
      if (hasJobId()) {
        result = result && getJobId()
            .equals(other.getJobId());
      }
      result = result && (hasFromEventId() == other.hasFromEventId());
      if (hasFromEventId()) {
        result = result && (getFromEventId()
            == other.getFromEventId());
      }
      result = result && (hasMaxEvents() == other.hasMaxEvents());
      if (hasMaxEvents()) {
        result = result && (getMaxEvents()
            == other.getMaxEvents());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasJobId()) {
        hash = (37 * hash) + JOB_ID_FIELD_NUMBER;
        hash = (53 * hash) + getJobId().hashCode();
      }
      if (hasFromEventId()) {
        hash = (37 * hash) + FROM_EVENT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getFromEventId();
      }
      if (hasMaxEvents()) {
        hash = (37 * hash) + MAX_EVENTS_FIELD_NUMBER;
        hash = (53 * hash) + getMaxEvents();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.GetTaskAttemptCompletionEventsRequestProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getJobIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (jobIdBuilder_ == null) {
          jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
        } else {
          jobIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        fromEventId_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        maxEvents_ = 0;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (jobIdBuilder_ == null) {
          result.jobId_ = jobId_;
        } else {
          result.jobId_ = jobIdBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.fromEventId_ = fromEventId_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.maxEvents_ = maxEvents_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.getDefaultInstance()) return this;
        if (other.hasJobId()) {
          mergeJobId(other.getJobId());
        }
        if (other.hasFromEventId()) {
          setFromEventId(other.getFromEventId());
        }
        if (other.hasMaxEvents()) {
          setMaxEvents(other.getMaxEvents());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.JobIdProto job_id = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> jobIdBuilder_;
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public boolean hasJobId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
        if (jobIdBuilder_ == null) {
          return jobId_;
        } else {
          return jobIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder setJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
        if (jobIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          jobId_ = value;
          onChanged();
        } else {
          jobIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder setJobId(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder builderForValue) {
        if (jobIdBuilder_ == null) {
          jobId_ = builderForValue.build();
          onChanged();
        } else {
          jobIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder mergeJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
        if (jobIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              jobId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance()) {
            jobId_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.newBuilder(jobId_).mergeFrom(value).buildPartial();
          } else {
            jobId_ = value;
          }
          onChanged();
        } else {
          jobIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder clearJobId() {
        if (jobIdBuilder_ == null) {
          jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
          onChanged();
        } else {
          jobIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder getJobIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getJobIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
        if (jobIdBuilder_ != null) {
          return jobIdBuilder_.getMessageOrBuilder();
        } else {
          return jobId_;
        }
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> 
          getJobIdFieldBuilder() {
        if (jobIdBuilder_ == null) {
          jobIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>(
                  jobId_,
                  getParentForChildren(),
                  isClean());
          jobId_ = null;
        }
        return jobIdBuilder_;
      }

      // optional int32 from_event_id = 2;
      private int fromEventId_ ;
      /**
       * optional int32 from_event_id = 2;
       */
      public boolean hasFromEventId() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * optional int32 from_event_id = 2;
       */
      public int getFromEventId() {
        return fromEventId_;
      }
      /**
       * optional int32 from_event_id = 2;
       */
      public Builder setFromEventId(int value) {
        bitField0_ |= 0x00000002;
        fromEventId_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 from_event_id = 2;
       */
      public Builder clearFromEventId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        fromEventId_ = 0;
        onChanged();
        return this;
      }

      // optional int32 max_events = 3;
      private int maxEvents_ ;
      /**
       * optional int32 max_events = 3;
       */
      public boolean hasMaxEvents() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * optional int32 max_events = 3;
       */
      public int getMaxEvents() {
        return maxEvents_;
      }
      /**
       * optional int32 max_events = 3;
       */
      public Builder setMaxEvents(int value) {
        bitField0_ |= 0x00000004;
        maxEvents_ = value;
        onChanged();
        return this;
      }
      /**
       * optional int32 max_events = 3;
       */
      public Builder clearMaxEvents() {
        bitField0_ = (bitField0_ & ~0x00000004);
        maxEvents_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetTaskAttemptCompletionEventsRequestProto)
    }

    static {
      defaultInstance = new GetTaskAttemptCompletionEventsRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetTaskAttemptCompletionEventsRequestProto)
  }
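
  // Editor's note: a minimal sketch of building this request to page through
  // task attempt completion events. The empty job id and the page size of 100
  // are hypothetical placeholders.
  //
  //   MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto request =
  //       MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.newBuilder()
  //           .setJobId(MRProtos.JobIdProto.getDefaultInstance())
  //           .setFromEventId(0)
  //           .setMaxEvents(100)
  //           .build();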

  public interface GetTaskAttemptCompletionEventsResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
    /**
     * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
     */
    java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto> 
        getCompletionEventsList();
    /**
     * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto getCompletionEvents(int index);
    /**
     * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
     */
    int getCompletionEventsCount();
    /**
     * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
     */
    java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder> 
        getCompletionEventsOrBuilderList();
    /**
     * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder getCompletionEventsOrBuilder(
        int index);
  }
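
  // Editor's note: reconstructed from the accessor comments above, the source
  // message is (sketch):
  //
  //   message GetTaskAttemptCompletionEventsResponseProto {
  //     repeated TaskAttemptCompletionEventProto completion_events = 1;
  //   }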
  /**
   * Protobuf type {@code hadoop.mapreduce.GetTaskAttemptCompletionEventsResponseProto}
   */
  public static final class GetTaskAttemptCompletionEventsResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements GetTaskAttemptCompletionEventsResponseProtoOrBuilder {
    // Use GetTaskAttemptCompletionEventsResponseProto.newBuilder() to construct.
    private GetTaskAttemptCompletionEventsResponseProto(com.google.protobuf.GeneratedMessage.Builder builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GetTaskAttemptCompletionEventsResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetTaskAttemptCompletionEventsResponseProto defaultInstance;
    public static GetTaskAttemptCompletionEventsResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetTaskAttemptCompletionEventsResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GetTaskAttemptCompletionEventsResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                completionEvents_ = new java.util.ArrayList<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto>();
                mutable_bitField0_ |= 0x00000001;
              }
              completionEvents_.add(input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          completionEvents_ = java.util.Collections.unmodifiableList(completionEvents_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GetTaskAttemptCompletionEventsResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<GetTaskAttemptCompletionEventsResponseProto>() {
      public GetTaskAttemptCompletionEventsResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetTaskAttemptCompletionEventsResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetTaskAttemptCompletionEventsResponseProto> getParserForType() {
      return PARSER;
    }

    // repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
    public static final int COMPLETION_EVENTS_FIELD_NUMBER = 1;
    private java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto> completionEvents_;
    /**
     * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
     */
    public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto> getCompletionEventsList() {
      return completionEvents_;
    }
    /**
     * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
     */
    public java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder> 
        getCompletionEventsOrBuilderList() {
      return completionEvents_;
    }
    /**
     * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
     */
    public int getCompletionEventsCount() {
      return completionEvents_.size();
    }
    /**
     * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto getCompletionEvents(int index) {
      return completionEvents_.get(index);
    }
    /**
     * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder getCompletionEventsOrBuilder(
        int index) {
      return completionEvents_.get(index);
    }

    private void initFields() {
      completionEvents_ = java.util.Collections.emptyList();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < completionEvents_.size(); i++) {
        output.writeMessage(1, completionEvents_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < completionEvents_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, completionEvents_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto) obj;

      boolean result = true;
      result = result && getCompletionEventsList()
          .equals(other.getCompletionEventsList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getCompletionEventsCount() > 0) {
        hash = (37 * hash) + COMPLETION_EVENTS_FIELD_NUMBER;
        hash = (53 * hash) + getCompletionEventsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.GetTaskAttemptCompletionEventsResponseProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getCompletionEventsFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (completionEventsBuilder_ == null) {
          completionEvents_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          completionEventsBuilder_.clear();
        }
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto(this);
        int from_bitField0_ = bitField0_;
        if (completionEventsBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            completionEvents_ = java.util.Collections.unmodifiableList(completionEvents_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.completionEvents_ = completionEvents_;
        } else {
          result.completionEvents_ = completionEventsBuilder_.build();
        }
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.getDefaultInstance()) return this;
        if (completionEventsBuilder_ == null) {
          if (!other.completionEvents_.isEmpty()) {
            if (completionEvents_.isEmpty()) {
              completionEvents_ = other.completionEvents_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureCompletionEventsIsMutable();
              completionEvents_.addAll(other.completionEvents_);
            }
            onChanged();
          }
        } else {
          if (!other.completionEvents_.isEmpty()) {
            if (completionEventsBuilder_.isEmpty()) {
              completionEventsBuilder_.dispose();
              completionEventsBuilder_ = null;
              completionEvents_ = other.completionEvents_;
              bitField0_ = (bitField0_ & ~0x00000001);
              completionEventsBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getCompletionEventsFieldBuilder() : null;
            } else {
              completionEventsBuilder_.addAllMessages(other.completionEvents_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
      private java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto> completionEvents_ =
        java.util.Collections.emptyList();
      private void ensureCompletionEventsIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          completionEvents_ = new java.util.ArrayList<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto>(completionEvents_);
          bitField0_ |= 0x00000001;
         }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder> completionEventsBuilder_;

      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto> getCompletionEventsList() {
        if (completionEventsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(completionEvents_);
        } else {
          return completionEventsBuilder_.getMessageList();
        }
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public int getCompletionEventsCount() {
        if (completionEventsBuilder_ == null) {
          return completionEvents_.size();
        } else {
          return completionEventsBuilder_.getCount();
        }
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto getCompletionEvents(int index) {
        if (completionEventsBuilder_ == null) {
          return completionEvents_.get(index);
        } else {
          return completionEventsBuilder_.getMessage(index);
        }
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public Builder setCompletionEvents(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto value) {
        if (completionEventsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCompletionEventsIsMutable();
          completionEvents_.set(index, value);
          onChanged();
        } else {
          completionEventsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public Builder setCompletionEvents(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder builderForValue) {
        if (completionEventsBuilder_ == null) {
          ensureCompletionEventsIsMutable();
          completionEvents_.set(index, builderForValue.build());
          onChanged();
        } else {
          completionEventsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public Builder addCompletionEvents(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto value) {
        if (completionEventsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCompletionEventsIsMutable();
          completionEvents_.add(value);
          onChanged();
        } else {
          completionEventsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public Builder addCompletionEvents(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto value) {
        if (completionEventsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCompletionEventsIsMutable();
          completionEvents_.add(index, value);
          onChanged();
        } else {
          completionEventsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public Builder addCompletionEvents(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder builderForValue) {
        if (completionEventsBuilder_ == null) {
          ensureCompletionEventsIsMutable();
          completionEvents_.add(builderForValue.build());
          onChanged();
        } else {
          completionEventsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public Builder addCompletionEvents(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder builderForValue) {
        if (completionEventsBuilder_ == null) {
          ensureCompletionEventsIsMutable();
          completionEvents_.add(index, builderForValue.build());
          onChanged();
        } else {
          completionEventsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public Builder addAllCompletionEvents(
          java.lang.Iterable<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto> values) {
        if (completionEventsBuilder_ == null) {
          ensureCompletionEventsIsMutable();
          super.addAll(values, completionEvents_);
          onChanged();
        } else {
          completionEventsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public Builder clearCompletionEvents() {
        if (completionEventsBuilder_ == null) {
          completionEvents_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          completionEventsBuilder_.clear();
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public Builder removeCompletionEvents(int index) {
        if (completionEventsBuilder_ == null) {
          ensureCompletionEventsIsMutable();
          completionEvents_.remove(index);
          onChanged();
        } else {
          completionEventsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder getCompletionEventsBuilder(
          int index) {
        return getCompletionEventsFieldBuilder().getBuilder(index);
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder getCompletionEventsOrBuilder(
          int index) {
        if (completionEventsBuilder_ == null) {
          return completionEvents_.get(index);
        } else {
          return completionEventsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder>
           getCompletionEventsOrBuilderList() {
        if (completionEventsBuilder_ != null) {
          return completionEventsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(completionEvents_);
        }
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder addCompletionEventsBuilder() {
        return getCompletionEventsFieldBuilder().addBuilder(
            org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder addCompletionEventsBuilder(
          int index) {
        return getCompletionEventsFieldBuilder().addBuilder(
            index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
       */
      public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder>
           getCompletionEventsBuilderList() {
        return getCompletionEventsFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder> 
          getCompletionEventsFieldBuilder() {
        if (completionEventsBuilder_ == null) {
          completionEventsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder>(
                  completionEvents_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          completionEvents_ = null;
        }
        return completionEventsBuilder_;
      }
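
      // Editorial note (not generated output): getCompletionEventsFieldBuilder()
      // creates the RepeatedFieldBuilder lazily on first access, seeds it with
      // the current completionEvents_ list, and then nulls the list so the
      // field builder becomes the single source of truth for the repeated
      // field; every accessor above branches on completionEventsBuilder_ ==
      // null for exactly this reason.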

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetTaskAttemptCompletionEventsResponseProto)
    }

    static {
      defaultInstance = new GetTaskAttemptCompletionEventsResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetTaskAttemptCompletionEventsResponseProto)
  }
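
  // Illustrative usage sketch (editorial, not generated output): building and
  // reading a GetTaskAttemptCompletionEventsResponseProto with the builder API
  // above. A real completion event would come from the MR app master; the
  // default instance stands in for it here.
  //
  //   MRProtos.TaskAttemptCompletionEventProto event =
  //       MRProtos.TaskAttemptCompletionEventProto.getDefaultInstance();
  //   GetTaskAttemptCompletionEventsResponseProto response =
  //       GetTaskAttemptCompletionEventsResponseProto.newBuilder()
  //           .addCompletionEvents(event)
  //           .build();
  //   int count = response.getCompletionEventsCount();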

  public interface GetTaskReportsRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.JobIdProto job_id = 1;
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    boolean hasJobId();
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId();
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder();

    // optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
    /**
     * optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
     */
    boolean hasTaskType();
    /**
     * optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto getTaskType();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.GetTaskReportsRequestProto}
   */
  public static final class GetTaskReportsRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements GetTaskReportsRequestProtoOrBuilder {
    // Use GetTaskReportsRequestProto.newBuilder() to construct.
    private GetTaskReportsRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GetTaskReportsRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetTaskReportsRequestProto defaultInstance;
    public static GetTaskReportsRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetTaskReportsRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GetTaskReportsRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = jobId_.toBuilder();
              }
              jobId_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(jobId_);
                jobId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              int rawValue = input.readEnum();
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto value = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(2, rawValue);
              } else {
                bitField0_ |= 0x00000002;
                taskType_ = value;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GetTaskReportsRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<GetTaskReportsRequestProto>() {
      public GetTaskReportsRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetTaskReportsRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetTaskReportsRequestProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.JobIdProto job_id = 1;
    public static final int JOB_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_;
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public boolean hasJobId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
      return jobId_;
    }
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
      return jobId_;
    }

    // optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
    public static final int TASK_TYPE_FIELD_NUMBER = 2;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto taskType_;
    /**
     * optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
     */
    public boolean hasTaskType() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto getTaskType() {
      return taskType_;
    }
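
    // Editorial note (not generated output): both fields are optional, and
    // getTaskType() returns the MAP default assigned in initFields() below
    // when the field is absent, so callers should gate on the has-methods:
    //
    //   if (request.hasTaskType()) {
    //     MRProtos.TaskTypeProto type = request.getTaskType();
    //     System.out.println("task_type = " + type);
    //   }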

    private void initFields() {
      jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
      taskType_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto.MAP;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, jobId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeEnum(2, taskType_.getNumber());
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, jobId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(2, taskType_.getNumber());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto) obj;

      boolean result = true;
      result = result && (hasJobId() == other.hasJobId());
      if (hasJobId()) {
        result = result && getJobId()
            .equals(other.getJobId());
      }
      result = result && (hasTaskType() == other.hasTaskType());
      if (hasTaskType()) {
        result = result &&
            (getTaskType() == other.getTaskType());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasJobId()) {
        hash = (37 * hash) + JOB_ID_FIELD_NUMBER;
        hash = (53 * hash) + getJobId().hashCode();
      }
      if (hasTaskType()) {
        hash = (37 * hash) + TASK_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getTaskType());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
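
    // Illustrative round trip (editorial, not generated output): the parseFrom
    // overloads above pair with toByteArray(), inherited from
    // com.google.protobuf.AbstractMessageLite:
    //
    //   byte[] bytes = request.toByteArray();
    //   GetTaskReportsRequestProto copy =
    //       GetTaskReportsRequestProto.parseFrom(bytes);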

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.GetTaskReportsRequestProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getJobIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (jobIdBuilder_ == null) {
          jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
        } else {
          jobIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        taskType_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto.MAP;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (jobIdBuilder_ == null) {
          result.jobId_ = jobId_;
        } else {
          result.jobId_ = jobIdBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.taskType_ = taskType_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto.getDefaultInstance()) return this;
        if (other.hasJobId()) {
          mergeJobId(other.getJobId());
        }
        if (other.hasTaskType()) {
          setTaskType(other.getTaskType());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.JobIdProto job_id = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> jobIdBuilder_;
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public boolean hasJobId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
        if (jobIdBuilder_ == null) {
          return jobId_;
        } else {
          return jobIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder setJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
        if (jobIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          jobId_ = value;
          onChanged();
        } else {
          jobIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder setJobId(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder builderForValue) {
        if (jobIdBuilder_ == null) {
          jobId_ = builderForValue.build();
          onChanged();
        } else {
          jobIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder mergeJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
        if (jobIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              jobId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance()) {
            jobId_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.newBuilder(jobId_).mergeFrom(value).buildPartial();
          } else {
            jobId_ = value;
          }
          onChanged();
        } else {
          jobIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder clearJobId() {
        if (jobIdBuilder_ == null) {
          jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
          onChanged();
        } else {
          jobIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder getJobIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getJobIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
        if (jobIdBuilder_ != null) {
          return jobIdBuilder_.getMessageOrBuilder();
        } else {
          return jobId_;
        }
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> 
          getJobIdFieldBuilder() {
        if (jobIdBuilder_ == null) {
          jobIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>(
                  jobId_,
                  getParentForChildren(),
                  isClean());
          jobId_ = null;
        }
        return jobIdBuilder_;
      }
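
      // Editorial note (not generated output): the SingleFieldBuilder is
      // created lazily; once it exists, jobId_ is nulled and all job_id reads
      // and writes flow through jobIdBuilder_, which also wires child change
      // notifications back to this Builder via getParentForChildren().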

      // optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto taskType_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto.MAP;
      /**
       * optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
       */
      public boolean hasTaskType() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto getTaskType() {
        return taskType_;
      }
      /**
       * optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
       */
      public Builder setTaskType(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        taskType_ = value;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
       */
      public Builder clearTaskType() {
        bitField0_ = (bitField0_ & ~0x00000002);
        taskType_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto.MAP;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetTaskReportsRequestProto)
    }

    static {
      defaultInstance = new GetTaskReportsRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetTaskReportsRequestProto)
  }
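
  // Illustrative usage sketch (editorial, not generated output): building a
  // GetTaskReportsRequestProto with the builder above. A real job id would be
  // supplied by the caller; the default instance stands in for it here.
  //
  //   GetTaskReportsRequestProto request =
  //       GetTaskReportsRequestProto.newBuilder()
  //           .setJobId(MRProtos.JobIdProto.getDefaultInstance())
  //           .setTaskType(MRProtos.TaskTypeProto.MAP)
  //           .build();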

  public interface GetTaskReportsResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
    /**
     * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
     */
    java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto>
        getTaskReportsList();
    /**
     * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto getTaskReports(int index);
    /**
     * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
     */
    int getTaskReportsCount();
    /**
     * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
     */
    java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder>
        getTaskReportsOrBuilderList();
    /**
     * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder getTaskReportsOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.GetTaskReportsResponseProto}
   */
  public static final class GetTaskReportsResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements GetTaskReportsResponseProtoOrBuilder {
    // Use GetTaskReportsResponseProto.newBuilder() to construct.
    private GetTaskReportsResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GetTaskReportsResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetTaskReportsResponseProto defaultInstance;
    public static GetTaskReportsResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetTaskReportsResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GetTaskReportsResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                taskReports_ = new java.util.ArrayList<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto>();
                mutable_bitField0_ |= 0x00000001;
              }
              taskReports_.add(input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          taskReports_ = java.util.Collections.unmodifiableList(taskReports_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GetTaskReportsResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<GetTaskReportsResponseProto>() {
      public GetTaskReportsResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetTaskReportsResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetTaskReportsResponseProto> getParserForType() {
      return PARSER;
    }

    // repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
    public static final int TASK_REPORTS_FIELD_NUMBER = 1;
    private java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto> taskReports_;
    /**
     * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
     */
    public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto> getTaskReportsList() {
      return taskReports_;
    }
    /**
     * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
     */
    public java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder>
        getTaskReportsOrBuilderList() {
      return taskReports_;
    }
    /**
     * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
     */
    public int getTaskReportsCount() {
      return taskReports_.size();
    }
    /**
     * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto getTaskReports(int index) {
      return taskReports_.get(index);
    }
    /**
     * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder getTaskReportsOrBuilder(
        int index) {
      return taskReports_.get(index);
    }
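
    // Illustrative read side (editorial, not generated output): iterating the
    // repeated task_reports field through the accessors above:
    //
    //   for (MRProtos.TaskReportProto report : response.getTaskReportsList()) {
    //     System.out.println(report);
    //   }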

    private void initFields() {
      taskReports_ = java.util.Collections.emptyList();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < taskReports_.size(); i++) {
        output.writeMessage(1, taskReports_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < taskReports_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, taskReports_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto) obj;

      boolean result = true;
      result = result && getTaskReportsList()
          .equals(other.getTaskReportsList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getTaskReportsCount() > 0) {
        hash = (37 * hash) + TASK_REPORTS_FIELD_NUMBER;
        hash = (53 * hash) + getTaskReportsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.GetTaskReportsResponseProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTaskReportsFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (taskReportsBuilder_ == null) {
          taskReports_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          taskReportsBuilder_.clear();
        }
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto(this);
        int from_bitField0_ = bitField0_;
        if (taskReportsBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            taskReports_ = java.util.Collections.unmodifiableList(taskReports_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.taskReports_ = taskReports_;
        } else {
          result.taskReports_ = taskReportsBuilder_.build();
        }
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.getDefaultInstance()) return this;
        if (taskReportsBuilder_ == null) {
          if (!other.taskReports_.isEmpty()) {
            if (taskReports_.isEmpty()) {
              taskReports_ = other.taskReports_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureTaskReportsIsMutable();
              taskReports_.addAll(other.taskReports_);
            }
            onChanged();
          }
        } else {
          if (!other.taskReports_.isEmpty()) {
            if (taskReportsBuilder_.isEmpty()) {
              taskReportsBuilder_.dispose();
              taskReportsBuilder_ = null;
              taskReports_ = other.taskReports_;
              bitField0_ = (bitField0_ & ~0x00000001);
              taskReportsBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getTaskReportsFieldBuilder() : null;
            } else {
              taskReportsBuilder_.addAllMessages(other.taskReports_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
      private java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto> taskReports_ =
        java.util.Collections.emptyList();
      private void ensureTaskReportsIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          taskReports_ = new java.util.ArrayList<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto>(taskReports_);
          bitField0_ |= 0x00000001;
         }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder> taskReportsBuilder_;

      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto> getTaskReportsList() {
        if (taskReportsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(taskReports_);
        } else {
          return taskReportsBuilder_.getMessageList();
        }
      }
      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public int getTaskReportsCount() {
        if (taskReportsBuilder_ == null) {
          return taskReports_.size();
        } else {
          return taskReportsBuilder_.getCount();
        }
      }
      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto getTaskReports(int index) {
        if (taskReportsBuilder_ == null) {
          return taskReports_.get(index);
        } else {
          return taskReportsBuilder_.getMessage(index);
        }
      }
      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public Builder setTaskReports(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto value) {
        if (taskReportsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTaskReportsIsMutable();
          taskReports_.set(index, value);
          onChanged();
        } else {
          taskReportsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public Builder setTaskReports(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder builderForValue) {
        if (taskReportsBuilder_ == null) {
          ensureTaskReportsIsMutable();
          taskReports_.set(index, builderForValue.build());
          onChanged();
        } else {
          taskReportsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public Builder addTaskReports(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto value) {
        if (taskReportsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTaskReportsIsMutable();
          taskReports_.add(value);
          onChanged();
        } else {
          taskReportsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public Builder addTaskReports(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto value) {
        if (taskReportsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTaskReportsIsMutable();
          taskReports_.add(index, value);
          onChanged();
        } else {
          taskReportsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public Builder addTaskReports(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder builderForValue) {
        if (taskReportsBuilder_ == null) {
          ensureTaskReportsIsMutable();
          taskReports_.add(builderForValue.build());
          onChanged();
        } else {
          taskReportsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public Builder addTaskReports(
          int index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder builderForValue) {
        if (taskReportsBuilder_ == null) {
          ensureTaskReportsIsMutable();
          taskReports_.add(index, builderForValue.build());
          onChanged();
        } else {
          taskReportsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public Builder addAllTaskReports(
          java.lang.Iterable<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto> values) {
        if (taskReportsBuilder_ == null) {
          ensureTaskReportsIsMutable();
          super.addAll(values, taskReports_);
          onChanged();
        } else {
          taskReportsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public Builder clearTaskReports() {
        if (taskReportsBuilder_ == null) {
          taskReports_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          taskReportsBuilder_.clear();
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public Builder removeTaskReports(int index) {
        if (taskReportsBuilder_ == null) {
          ensureTaskReportsIsMutable();
          taskReports_.remove(index);
          onChanged();
        } else {
          taskReportsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder getTaskReportsBuilder(
          int index) {
        return getTaskReportsFieldBuilder().getBuilder(index);
      }
      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder getTaskReportsOrBuilder(
          int index) {
        if (taskReportsBuilder_ == null) {
          return taskReports_.get(index);
        } else {
          return taskReportsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public java.util.List<? extends org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder>
           getTaskReportsOrBuilderList() {
        if (taskReportsBuilder_ != null) {
          return taskReportsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(taskReports_);
        }
      }
      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder addTaskReportsBuilder() {
        return getTaskReportsFieldBuilder().addBuilder(
            org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder addTaskReportsBuilder(
          int index) {
        return getTaskReportsFieldBuilder().addBuilder(
            index, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
       */
      public java.util.List<org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder>
           getTaskReportsBuilderList() {
        return getTaskReportsFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder> 
          getTaskReportsFieldBuilder() {
        if (taskReportsBuilder_ == null) {
          taskReportsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProtoOrBuilder>(
                  taskReports_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          taskReports_ = null;
        }
        return taskReportsBuilder_;
      }
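
      // Illustrative sketch (comments only, not generated code): growing and
      // editing the repeated task_reports field through the builder methods
      // declared above. How TaskReportProto itself is populated depends on
      // the generated MRProtos API and is not shown here.
      //
      //   GetTaskReportsResponseProto.Builder b =
      //       GetTaskReportsResponseProto.newBuilder();
      //   b.addTaskReportsBuilder();                       // appends a default report
      //   org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder rb =
      //       b.getTaskReportsBuilder(0);                  // edit it in place
      //   GetTaskReportsResponseProto resp = b.build();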

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetTaskReportsResponseProto)
    }

    static {
      defaultInstance = new GetTaskReportsResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetTaskReportsResponseProto)
  }
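
  // Usage sketch (comments only): a round trip for GetTaskReportsResponseProto
  // via the standard generated entry points (newBuilder, toByteArray,
  // parseFrom). Nothing Hadoop-specific is assumed beyond the accessors above.
  //
  //   GetTaskReportsResponseProto resp =
  //       GetTaskReportsResponseProto.newBuilder()
  //           .addTaskReports(
  //               org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance())
  //           .build();
  //   byte[] wire = resp.toByteArray();
  //   GetTaskReportsResponseProto parsed = GetTaskReportsResponseProto.parseFrom(wire);
  //   assert parsed.getTaskReportsCount() == 1;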

  public interface GetDiagnosticsRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    boolean hasTaskAttemptId();
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId();
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.GetDiagnosticsRequestProto}
   */
  public static final class GetDiagnosticsRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements GetDiagnosticsRequestProtoOrBuilder {
    // Use GetDiagnosticsRequestProto.newBuilder() to construct.
    private GetDiagnosticsRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GetDiagnosticsRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetDiagnosticsRequestProto defaultInstance;
    public static GetDiagnosticsRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetDiagnosticsRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GetDiagnosticsRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = taskAttemptId_.toBuilder();
              }
              taskAttemptId_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(taskAttemptId_);
                taskAttemptId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GetDiagnosticsRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<GetDiagnosticsRequestProto>() {
      public GetDiagnosticsRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetDiagnosticsRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetDiagnosticsRequestProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
    public static final int TASK_ATTEMPT_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_;
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    public boolean hasTaskAttemptId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
      return taskAttemptId_;
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
      return taskAttemptId_;
    }

    private void initFields() {
      taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, taskAttemptId_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, taskAttemptId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto) obj;

      boolean result = true;
      result = result && (hasTaskAttemptId() == other.hasTaskAttemptId());
      if (hasTaskAttemptId()) {
        result = result && getTaskAttemptId()
            .equals(other.getTaskAttemptId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTaskAttemptId()) {
        hash = (37 * hash) + TASK_ATTEMPT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getTaskAttemptId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.GetDiagnosticsRequestProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTaskAttemptIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
        } else {
          taskAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (taskAttemptIdBuilder_ == null) {
          result.taskAttemptId_ = taskAttemptId_;
        } else {
          result.taskAttemptId_ = taskAttemptIdBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto.getDefaultInstance()) return this;
        if (other.hasTaskAttemptId()) {
          mergeTaskAttemptId(other.getTaskAttemptId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> taskAttemptIdBuilder_;
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public boolean hasTaskAttemptId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
        if (taskAttemptIdBuilder_ == null) {
          return taskAttemptId_;
        } else {
          return taskAttemptIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder setTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
        if (taskAttemptIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          taskAttemptId_ = value;
          onChanged();
        } else {
          taskAttemptIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder setTaskAttemptId(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder builderForValue) {
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptId_ = builderForValue.build();
          onChanged();
        } else {
          taskAttemptIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder mergeTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
        if (taskAttemptIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              taskAttemptId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance()) {
            taskAttemptId_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.newBuilder(taskAttemptId_).mergeFrom(value).buildPartial();
          } else {
            taskAttemptId_ = value;
          }
          onChanged();
        } else {
          taskAttemptIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder clearTaskAttemptId() {
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
          onChanged();
        } else {
          taskAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder getTaskAttemptIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getTaskAttemptIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
        if (taskAttemptIdBuilder_ != null) {
          return taskAttemptIdBuilder_.getMessageOrBuilder();
        } else {
          return taskAttemptId_;
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> 
          getTaskAttemptIdFieldBuilder() {
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder>(
                  taskAttemptId_,
                  getParentForChildren(),
                  isClean());
          taskAttemptId_ = null;
        }
        return taskAttemptIdBuilder_;
      }
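
      // Illustrative note (comments only): mergeTaskAttemptId() above does a
      // field-by-field merge when a non-default id is already set; otherwise
      // it simply adopts the incoming message, so afterwards
      // hasTaskAttemptId() reports true:
      //
      //   GetDiagnosticsRequestProto.Builder b = GetDiagnosticsRequestProto.newBuilder();
      //   b.mergeTaskAttemptId(
      //       org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance());
      //   assert b.hasTaskAttemptId();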

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetDiagnosticsRequestProto)
    }

    static {
      defaultInstance = new GetDiagnosticsRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetDiagnosticsRequestProto)
  }
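
  // Usage sketch (comments only): building a diagnostics request. The default
  // TaskAttemptIdProto instance stands in for a real attempt id, which a
  // caller would normally populate through the generated MRProtos builders.
  //
  //   GetDiagnosticsRequestProto req =
  //       GetDiagnosticsRequestProto.newBuilder()
  //           .setTaskAttemptId(
  //               org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance())
  //           .build();
  //   assert req.hasTaskAttemptId();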

  public interface GetDiagnosticsResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated string diagnostics = 1;
    /**
     * repeated string diagnostics = 1;
     */
    java.util.List<java.lang.String>
    getDiagnosticsList();
    /**
     * repeated string diagnostics = 1;
     */
    int getDiagnosticsCount();
    /**
     * repeated string diagnostics = 1;
     */
    java.lang.String getDiagnostics(int index);
    /**
     * repeated string diagnostics = 1;
     */
    com.google.protobuf.ByteString
        getDiagnosticsBytes(int index);
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.GetDiagnosticsResponseProto}
   */
  public static final class GetDiagnosticsResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements GetDiagnosticsResponseProtoOrBuilder {
    // Use GetDiagnosticsResponseProto.newBuilder() to construct.
    private GetDiagnosticsResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private GetDiagnosticsResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetDiagnosticsResponseProto defaultInstance;
    public static GetDiagnosticsResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetDiagnosticsResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private GetDiagnosticsResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                diagnostics_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000001;
              }
              diagnostics_.add(input.readBytes());
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          diagnostics_ = new com.google.protobuf.UnmodifiableLazyStringList(diagnostics_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.Builder.class);
    }

    public static com.google.protobuf.Parser<GetDiagnosticsResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<GetDiagnosticsResponseProto>() {
      public GetDiagnosticsResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetDiagnosticsResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetDiagnosticsResponseProto> getParserForType() {
      return PARSER;
    }

    // repeated string diagnostics = 1;
    public static final int DIAGNOSTICS_FIELD_NUMBER = 1;
    private com.google.protobuf.LazyStringList diagnostics_;
    /**
     * repeated string diagnostics = 1;
     */
    public java.util.List<java.lang.String>
        getDiagnosticsList() {
      return diagnostics_;
    }
    /**
     * repeated string diagnostics = 1;
     */
    public int getDiagnosticsCount() {
      return diagnostics_.size();
    }
    /**
     * repeated string diagnostics = 1;
     */
    public java.lang.String getDiagnostics(int index) {
      return diagnostics_.get(index);
    }
    /**
     * repeated string diagnostics = 1;
     */
    public com.google.protobuf.ByteString
        getDiagnosticsBytes(int index) {
      return diagnostics_.getByteString(index);
    }

    private void initFields() {
      diagnostics_ = com.google.protobuf.LazyStringArrayList.EMPTY;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < diagnostics_.size(); i++) {
        output.writeBytes(1, diagnostics_.getByteString(i));
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < diagnostics_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(diagnostics_.getByteString(i));
        }
        size += dataSize;
        size += 1 * getDiagnosticsList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto) obj;

      boolean result = true;
      result = result && getDiagnosticsList()
          .equals(other.getDiagnosticsList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getDiagnosticsCount() > 0) {
        hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER;
        hash = (53 * hash) + getDiagnosticsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.GetDiagnosticsResponseProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        diagnostics_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto(this);
        int from_bitField0_ = bitField0_;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          diagnostics_ = new com.google.protobuf.UnmodifiableLazyStringList(
              diagnostics_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.diagnostics_ = diagnostics_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.getDefaultInstance()) return this;
        if (!other.diagnostics_.isEmpty()) {
          if (diagnostics_.isEmpty()) {
            diagnostics_ = other.diagnostics_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureDiagnosticsIsMutable();
            diagnostics_.addAll(other.diagnostics_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // repeated string diagnostics = 1;
      private com.google.protobuf.LazyStringList diagnostics_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      private void ensureDiagnosticsIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          diagnostics_ = new com.google.protobuf.LazyStringArrayList(diagnostics_);
          bitField0_ |= 0x00000001;
        }
      }
      /**
       * repeated string diagnostics = 1;
       */
      public java.util.List<java.lang.String>
          getDiagnosticsList() {
        return java.util.Collections.unmodifiableList(diagnostics_);
      }
      /**
       * repeated string diagnostics = 1;
       */
      public int getDiagnosticsCount() {
        return diagnostics_.size();
      }
      /**
       * repeated string diagnostics = 1;
       */
      public java.lang.String getDiagnostics(int index) {
        return diagnostics_.get(index);
      }
      /**
       * repeated string diagnostics = 1;
       */
      public com.google.protobuf.ByteString
          getDiagnosticsBytes(int index) {
        return diagnostics_.getByteString(index);
      }
      /**
       * repeated string diagnostics = 1;
       */
      public Builder setDiagnostics(
          int index, java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDiagnosticsIsMutable();
        diagnostics_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * repeated string diagnostics = 1;
       */
      public Builder addDiagnostics(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDiagnosticsIsMutable();
        diagnostics_.add(value);
        onChanged();
        return this;
      }
      /**
       * repeated string diagnostics = 1;
       */
      public Builder addAllDiagnostics(
          java.lang.Iterable<java.lang.String> values) {
        ensureDiagnosticsIsMutable();
        super.addAll(values, diagnostics_);
        onChanged();
        return this;
      }
      /**
       * repeated string diagnostics = 1;
       */
      public Builder clearDiagnostics() {
        diagnostics_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * repeated string diagnostics = 1;
       */
      public Builder addDiagnosticsBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDiagnosticsIsMutable();
        diagnostics_.add(value);
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.GetDiagnosticsResponseProto)
    }

    static {
      defaultInstance = new GetDiagnosticsResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.GetDiagnosticsResponseProto)
  }
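
  // Usage sketch (comments only): filling and reading the repeated
  // diagnostics field; the strings below are illustrative, not real output.
  //
  //   GetDiagnosticsResponseProto resp =
  //       GetDiagnosticsResponseProto.newBuilder()
  //           .addDiagnostics("Container killed by the ApplicationMaster.")
  //           .addDiagnostics("Task attempt failed on node n1")
  //           .build();
  //   for (java.lang.String line : resp.getDiagnosticsList()) {
  //     System.out.println(line);
  //   }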

  public interface KillJobRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.JobIdProto job_id = 1;
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    boolean hasJobId();
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId();
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.KillJobRequestProto}
   */
  public static final class KillJobRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements KillJobRequestProtoOrBuilder {
    // Use KillJobRequestProto.newBuilder() to construct.
    private KillJobRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private KillJobRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final KillJobRequestProto defaultInstance;
    public static KillJobRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public KillJobRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private KillJobRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = jobId_.toBuilder();
              }
              jobId_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(jobId_);
                jobId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto.Builder.class);
    }

    public static com.google.protobuf.Parser<KillJobRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<KillJobRequestProto>() {
      public KillJobRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new KillJobRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<KillJobRequestProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.JobIdProto job_id = 1;
    public static final int JOB_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_;
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public boolean hasJobId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
      return jobId_;
    }
    /**
     * optional .hadoop.mapreduce.JobIdProto job_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
      return jobId_;
    }

    private void initFields() {
      jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, jobId_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, jobId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto) obj;

      boolean result = true;
      result = result && (hasJobId() == other.hasJobId());
      if (hasJobId()) {
        result = result && getJobId()
            .equals(other.getJobId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasJobId()) {
        hash = (37 * hash) + JOB_ID_FIELD_NUMBER;
        hash = (53 * hash) + getJobId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.KillJobRequestProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getJobIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (jobIdBuilder_ == null) {
          jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
        } else {
          jobIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobRequestProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (jobIdBuilder_ == null) {
          result.jobId_ = jobId_;
        } else {
          result.jobId_ = jobIdBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto.getDefaultInstance()) return this;
        if (other.hasJobId()) {
          mergeJobId(other.getJobId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
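      // NOTE (added for clarity; not emitted by protoc): the finally block
      // above intentionally merges whatever was parsed before a failure, so
      // the partial data carried by e.getUnfinishedMessage() is folded into
      // this builder even when the exception propagates.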
      private int bitField0_;

      // optional .hadoop.mapreduce.JobIdProto job_id = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> jobIdBuilder_;
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public boolean hasJobId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto getJobId() {
        if (jobIdBuilder_ == null) {
          return jobId_;
        } else {
          return jobIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder setJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
        if (jobIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          jobId_ = value;
          onChanged();
        } else {
          jobIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder setJobId(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder builderForValue) {
        if (jobIdBuilder_ == null) {
          jobId_ = builderForValue.build();
          onChanged();
        } else {
          jobIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder mergeJobId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto value) {
        if (jobIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              jobId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance()) {
            jobId_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.newBuilder(jobId_).mergeFrom(value).buildPartial();
          } else {
            jobId_ = value;
          }
          onChanged();
        } else {
          jobIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public Builder clearJobId() {
        if (jobIdBuilder_ == null) {
          jobId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.getDefaultInstance();
          onChanged();
        } else {
          jobIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder getJobIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getJobIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder getJobIdOrBuilder() {
        if (jobIdBuilder_ != null) {
          return jobIdBuilder_.getMessageOrBuilder();
        } else {
          return jobId_;
        }
      }
      /**
       * optional .hadoop.mapreduce.JobIdProto job_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder> 
          getJobIdFieldBuilder() {
        if (jobIdBuilder_ == null) {
          jobIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>(
                  jobId_,
                  getParentForChildren(),
                  isClean());
          jobId_ = null;
        }
        return jobIdBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.KillJobRequestProto)
    }

    static {
      defaultInstance = new KillJobRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.KillJobRequestProto)
  }
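  // Usage sketch (added for clarity; not part of the generated file). It
  // shows the generated builder API of KillJobRequestProto above; the
  // default-instance JobIdProto is a stand-in, since a real caller would
  // supply a populated id from the MR client layer:
  //
  //   MRProtos.JobIdProto jobId =
  //       MRProtos.JobIdProto.getDefaultInstance();      // stand-in value
  //   KillJobRequestProto request = KillJobRequestProto.newBuilder()
  //       .setJobId(jobId)
  //       .build();
  //   byte[] wire = request.toByteArray();               // serialize for RPC
  //   KillJobRequestProto parsed =
  //       KillJobRequestProto.parseFrom(wire);           // round-trip parse
  //   assert parsed.hasJobId();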

  public interface KillJobResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.KillJobResponseProto}
   */
  public static final class KillJobResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements KillJobResponseProtoOrBuilder {
    // Use KillJobResponseProto.newBuilder() to construct.
    private KillJobResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private KillJobResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final KillJobResponseProto defaultInstance;
    public static KillJobResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public KillJobResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private KillJobResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.Builder.class);
    }

    public static com.google.protobuf.Parser<KillJobResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<KillJobResponseProto>() {
      public KillJobResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new KillJobResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<KillJobResponseProto> getParserForType() {
      return PARSER;
    }

    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.KillJobResponseProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillJobResponseProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.KillJobResponseProto)
    }

    static {
      defaultInstance = new KillJobResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.KillJobResponseProto)
  }
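  // NOTE (added for clarity; not emitted by protoc): KillJobResponseProto
  // declares no fields, so it serializes to zero bytes and serves purely as
  // an RPC acknowledgement. A minimal sketch:
  //
  //   KillJobResponseProto resp = KillJobResponseProto.getDefaultInstance();
  //   assert resp.getSerializedSize() == 0;              // empty payload
  //   KillJobResponseProto roundTrip =
  //       KillJobResponseProto.parseFrom(new byte[0]);   // parses cleanly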

  public interface KillTaskRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.TaskIdProto task_id = 1;
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    boolean hasTaskId();
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId();
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.KillTaskRequestProto}
   */
  public static final class KillTaskRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements KillTaskRequestProtoOrBuilder {
    // Use KillTaskRequestProto.newBuilder() to construct.
    private KillTaskRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private KillTaskRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final KillTaskRequestProto defaultInstance;
    public static KillTaskRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public KillTaskRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private KillTaskRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = taskId_.toBuilder();
              }
              taskId_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(taskId_);
                taskId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto.Builder.class);
    }

    public static com.google.protobuf.Parser<KillTaskRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<KillTaskRequestProto>() {
      public KillTaskRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new KillTaskRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<KillTaskRequestProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.TaskIdProto task_id = 1;
    public static final int TASK_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto taskId_;
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    public boolean hasTaskId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId() {
      return taskId_;
    }
    /**
     * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder() {
      return taskId_;
    }

    private void initFields() {
      taskId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, taskId_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, taskId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto) obj;

      boolean result = true;
      result = result && (hasTaskId() == other.hasTaskId());
      if (hasTaskId()) {
        result = result && getTaskId()
            .equals(other.getTaskId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTaskId()) {
        hash = (37 * hash) + TASK_ID_FIELD_NUMBER;
        hash = (53 * hash) + getTaskId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.KillTaskRequestProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTaskIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (taskIdBuilder_ == null) {
          taskId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance();
        } else {
          taskIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskRequestProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (taskIdBuilder_ == null) {
          result.taskId_ = taskId_;
        } else {
          result.taskId_ = taskIdBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto.getDefaultInstance()) return this;
        if (other.hasTaskId()) {
          mergeTaskId(other.getTaskId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.TaskIdProto task_id = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto taskId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder> taskIdBuilder_;
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public boolean hasTaskId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto getTaskId() {
        if (taskIdBuilder_ == null) {
          return taskId_;
        } else {
          return taskIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public Builder setTaskId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto value) {
        if (taskIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          taskId_ = value;
          onChanged();
        } else {
          taskIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public Builder setTaskId(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder builderForValue) {
        if (taskIdBuilder_ == null) {
          taskId_ = builderForValue.build();
          onChanged();
        } else {
          taskIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public Builder mergeTaskId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto value) {
        if (taskIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              taskId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance()) {
            taskId_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.newBuilder(taskId_).mergeFrom(value).buildPartial();
          } else {
            taskId_ = value;
          }
          onChanged();
        } else {
          taskIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public Builder clearTaskId() {
        if (taskIdBuilder_ == null) {
          taskId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance();
          onChanged();
        } else {
          taskIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder getTaskIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getTaskIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder getTaskIdOrBuilder() {
        if (taskIdBuilder_ != null) {
          return taskIdBuilder_.getMessageOrBuilder();
        } else {
          return taskId_;
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskIdProto task_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder> 
          getTaskIdFieldBuilder() {
        if (taskIdBuilder_ == null) {
          taskIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder>(
                  taskId_,
                  getParentForChildren(),
                  isClean());
          taskId_ = null;
        }
        return taskIdBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.KillTaskRequestProto)
    }

    static {
      defaultInstance = new KillTaskRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.KillTaskRequestProto)
  }
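  // NOTE (added for clarity; not emitted by protoc): in the builder above,
  // setTaskId(...) replaces any existing task_id outright, while
  // mergeTaskId(...) merges field-by-field when a non-default task_id is
  // already set; otherwise it behaves like a set. That is the standard
  // protobuf merge rule for singular message fields. A minimal sketch, with
  // the default-instance TaskIdProto as a stand-in value:
  //
  //   KillTaskRequestProto request = KillTaskRequestProto.newBuilder()
  //       .setTaskId(MRProtos.TaskIdProto.getDefaultInstance())
  //       .build();
  //   KillTaskRequestProto copy = request.toBuilder().build();
  //   assert copy.equals(request);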

  public interface KillTaskResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.KillTaskResponseProto}
   */
  public static final class KillTaskResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements KillTaskResponseProtoOrBuilder {
    // Use KillTaskResponseProto.newBuilder() to construct.
    private KillTaskResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private KillTaskResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final KillTaskResponseProto defaultInstance;
    public static KillTaskResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public KillTaskResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private KillTaskResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.Builder.class);
    }

    public static com.google.protobuf.Parser<KillTaskResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<KillTaskResponseProto>() {
      public KillTaskResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new KillTaskResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<KillTaskResponseProto> getParserForType() {
      return PARSER;
    }

    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.KillTaskResponseProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskResponseProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.KillTaskResponseProto)
    }

    static {
      defaultInstance = new KillTaskResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.KillTaskResponseProto)
  }
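
  // Editor's illustration, not generated by protoc: a minimal round trip for
  // the field-less KillTaskResponseProto. The helper name is hypothetical and
  // exists only for this sketch.
  private static KillTaskResponseProto exampleKillTaskResponseRoundTrip()
      throws com.google.protobuf.InvalidProtocolBufferException {
    // With no declared fields, the serialized form is empty (zero bytes)
    // unless unknown fields were carried over from an earlier parse.
    byte[] wire = KillTaskResponseProto.getDefaultInstance().toByteArray();
    return KillTaskResponseProto.parseFrom(wire);
  }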

  public interface KillTaskAttemptRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    boolean hasTaskAttemptId();
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId();
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.KillTaskAttemptRequestProto}
   */
  public static final class KillTaskAttemptRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements KillTaskAttemptRequestProtoOrBuilder {
    // Use KillTaskAttemptRequestProto.newBuilder() to construct.
    private KillTaskAttemptRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private KillTaskAttemptRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final KillTaskAttemptRequestProto defaultInstance;
    public static KillTaskAttemptRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public KillTaskAttemptRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private KillTaskAttemptRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = taskAttemptId_.toBuilder();
              }
              taskAttemptId_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(taskAttemptId_);
                taskAttemptId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto.Builder.class);
    }

    public static com.google.protobuf.Parser<KillTaskAttemptRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<KillTaskAttemptRequestProto>() {
      public KillTaskAttemptRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new KillTaskAttemptRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<KillTaskAttemptRequestProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
    public static final int TASK_ATTEMPT_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_;
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    public boolean hasTaskAttemptId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
      return taskAttemptId_;
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
      return taskAttemptId_;
    }
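
    // Editor's note, not generated: because task_attempt_id is optional,
    // getTaskAttemptId() returns the default instance rather than null when
    // the field is unset, so callers should check hasTaskAttemptId() first.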

    private void initFields() {
      taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, taskAttemptId_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, taskAttemptId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
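
    // Editor's note, not generated: when the has-bit is set, the field's wire
    // size is computeMessageSize(1, taskAttemptId_), i.e. one byte for tag
    // 0x0A (field number 1 << 3 | wire type 2) plus the varint-encoded length
    // of the sub-message plus the sub-message bytes; an unset field adds
    // nothing.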

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto) obj;

      boolean result = true;
      result = result && (hasTaskAttemptId() == other.hasTaskAttemptId());
      if (hasTaskAttemptId()) {
        result = result && getTaskAttemptId()
            .equals(other.getTaskAttemptId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTaskAttemptId()) {
        hash = (37 * hash) + TASK_ATTEMPT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getTaskAttemptId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
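
    // Editor's note, not generated: the parseFrom overloads above expect the
    // input to contain exactly one message, while the parseDelimitedFrom
    // overloads first read a varint length prefix, so several messages can be
    // written back to back on a single stream, e.g.
    //
    //   request.writeDelimitedTo(out);                       // writer side
    //   KillTaskAttemptRequestProto.parseDelimitedFrom(in);  // reader side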

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.KillTaskAttemptRequestProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTaskAttemptIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
        } else {
          taskAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (taskAttemptIdBuilder_ == null) {
          result.taskAttemptId_ = taskAttemptId_;
        } else {
          result.taskAttemptId_ = taskAttemptIdBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto.getDefaultInstance()) return this;
        if (other.hasTaskAttemptId()) {
          mergeTaskAttemptId(other.getTaskAttemptId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> taskAttemptIdBuilder_;
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public boolean hasTaskAttemptId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
        if (taskAttemptIdBuilder_ == null) {
          return taskAttemptId_;
        } else {
          return taskAttemptIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder setTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
        if (taskAttemptIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          taskAttemptId_ = value;
          onChanged();
        } else {
          taskAttemptIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder setTaskAttemptId(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder builderForValue) {
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptId_ = builderForValue.build();
          onChanged();
        } else {
          taskAttemptIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder mergeTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
        if (taskAttemptIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              taskAttemptId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance()) {
            taskAttemptId_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.newBuilder(taskAttemptId_).mergeFrom(value).buildPartial();
          } else {
            taskAttemptId_ = value;
          }
          onChanged();
        } else {
          taskAttemptIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
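
      // Editor's note, not generated: mergeTaskAttemptId above follows the
      // standard protobuf rule for merging message fields: if the field is
      // already set, the incoming value is merged field-by-field into the
      // existing sub-message via buildPartial(); otherwise it simply replaces
      // the default instance.
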
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder clearTaskAttemptId() {
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
          onChanged();
        } else {
          taskAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder getTaskAttemptIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getTaskAttemptIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
        if (taskAttemptIdBuilder_ != null) {
          return taskAttemptIdBuilder_.getMessageOrBuilder();
        } else {
          return taskAttemptId_;
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> 
          getTaskAttemptIdFieldBuilder() {
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder>(
                  taskAttemptId_,
                  getParentForChildren(),
                  isClean());
          taskAttemptId_ = null;
        }
        return taskAttemptIdBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.KillTaskAttemptRequestProto)
    }

    static {
      defaultInstance = new KillTaskAttemptRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.KillTaskAttemptRequestProto)
  }
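
  // Editor's illustration, not generated by protoc: populating the optional
  // task_attempt_id field through the builder API. The helper name is
  // hypothetical; TaskAttemptIdProto comes from MRProtos (mr_protos.proto).
  private static KillTaskAttemptRequestProto exampleKillTaskAttemptRequest(
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto attemptId) {
    return KillTaskAttemptRequestProto.newBuilder()
        .setTaskAttemptId(attemptId) // sets the has-bit and the sub-message
        .build();
  }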

  public interface KillTaskAttemptResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.KillTaskAttemptResponseProto}
   */
  public static final class KillTaskAttemptResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements KillTaskAttemptResponseProtoOrBuilder {
    // Use KillTaskAttemptResponseProto.newBuilder() to construct.
    private KillTaskAttemptResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private KillTaskAttemptResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final KillTaskAttemptResponseProto defaultInstance;
    public static KillTaskAttemptResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public KillTaskAttemptResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private KillTaskAttemptResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.Builder.class);
    }

    public static com.google.protobuf.Parser<KillTaskAttemptResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<KillTaskAttemptResponseProto>() {
      public KillTaskAttemptResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new KillTaskAttemptResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<KillTaskAttemptResponseProto> getParserForType() {
      return PARSER;
    }

    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.KillTaskAttemptResponseProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.KillTaskAttemptResponseProto)
    }

    static {
      defaultInstance = new KillTaskAttemptResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.KillTaskAttemptResponseProto)
  }
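
  // Editor's illustration, not generated by protoc: equality of the empty
  // KillTaskAttemptResponseProto depends only on its descriptor and its
  // (empty) unknown field set, so a default instance and a freshly built one
  // compare equal. The helper name is hypothetical.
  private static boolean exampleEmptyResponsesAreEqual() {
    KillTaskAttemptResponseProto a = KillTaskAttemptResponseProto.getDefaultInstance();
    KillTaskAttemptResponseProto b = KillTaskAttemptResponseProto.newBuilder().build();
    return a.equals(b) && a.hashCode() == b.hashCode();
  }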

  public interface FailTaskAttemptRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    boolean hasTaskAttemptId();
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId();
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.FailTaskAttemptRequestProto}
   */
  public static final class FailTaskAttemptRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements FailTaskAttemptRequestProtoOrBuilder {
    // Use FailTaskAttemptRequestProto.newBuilder() to construct.
    private FailTaskAttemptRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private FailTaskAttemptRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final FailTaskAttemptRequestProto defaultInstance;
    public static FailTaskAttemptRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public FailTaskAttemptRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private FailTaskAttemptRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = taskAttemptId_.toBuilder();
              }
              taskAttemptId_ = input.readMessage(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(taskAttemptId_);
                taskAttemptId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto.Builder.class);
    }

    public static com.google.protobuf.Parser<FailTaskAttemptRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<FailTaskAttemptRequestProto>() {
      public FailTaskAttemptRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new FailTaskAttemptRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<FailTaskAttemptRequestProto> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
    public static final int TASK_ATTEMPT_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_;
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    public boolean hasTaskAttemptId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
      return taskAttemptId_;
    }
    /**
     * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
     */
    public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
      return taskAttemptId_;
    }

    private void initFields() {
      taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, taskAttemptId_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, taskAttemptId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto) obj;

      boolean result = true;
      result = result && (hasTaskAttemptId() == other.hasTaskAttemptId());
      if (hasTaskAttemptId()) {
        result = result && getTaskAttemptId()
            .equals(other.getTaskAttemptId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTaskAttemptId()) {
        hash = (37 * hash) + TASK_ATTEMPT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getTaskAttemptId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.FailTaskAttemptRequestProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTaskAttemptIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
        } else {
          taskAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (taskAttemptIdBuilder_ == null) {
          result.taskAttemptId_ = taskAttemptId_;
        } else {
          result.taskAttemptId_ = taskAttemptIdBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto.getDefaultInstance()) return this;
        if (other.hasTaskAttemptId()) {
          mergeTaskAttemptId(other.getTaskAttemptId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
      private org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> taskAttemptIdBuilder_;
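      // Editor's note: the message field and its lazy SingleFieldBuilder are
      // mutually exclusive. taskAttemptId_ holds a plain message until
      // getTaskAttemptIdFieldBuilder() is first called, at which point the
      // builder takes ownership and taskAttemptId_ is nulled out; bit 0 of
      // bitField0_ tracks whether this optional field was explicitly set.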
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public boolean hasTaskAttemptId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto getTaskAttemptId() {
        if (taskAttemptIdBuilder_ == null) {
          return taskAttemptId_;
        } else {
          return taskAttemptIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder setTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
        if (taskAttemptIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          taskAttemptId_ = value;
          onChanged();
        } else {
          taskAttemptIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder setTaskAttemptId(
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder builderForValue) {
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptId_ = builderForValue.build();
          onChanged();
        } else {
          taskAttemptIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder mergeTaskAttemptId(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto value) {
        if (taskAttemptIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              taskAttemptId_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance()) {
            taskAttemptId_ =
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.newBuilder(taskAttemptId_).mergeFrom(value).buildPartial();
          } else {
            taskAttemptId_ = value;
          }
          onChanged();
        } else {
          taskAttemptIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public Builder clearTaskAttemptId() {
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptId_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
          onChanged();
        } else {
          taskAttemptIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder getTaskAttemptIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getTaskAttemptIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder getTaskAttemptIdOrBuilder() {
        if (taskAttemptIdBuilder_ != null) {
          return taskAttemptIdBuilder_.getMessageOrBuilder();
        } else {
          return taskAttemptId_;
        }
      }
      /**
       * optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder> 
          getTaskAttemptIdFieldBuilder() {
        if (taskAttemptIdBuilder_ == null) {
          taskAttemptIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.Builder, org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder>(
                  taskAttemptId_,
                  getParentForChildren(),
                  isClean());
          taskAttemptId_ = null;
        }
        return taskAttemptIdBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.FailTaskAttemptRequestProto)
    }

    static {
      defaultInstance = new FailTaskAttemptRequestProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.FailTaskAttemptRequestProto)
  }
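  // --- Editor's note (illustrative sketch, not part of the generated code) ---
  // A minimal round trip for FailTaskAttemptRequestProto. A real caller would
  // supply a fully populated TaskAttemptIdProto; the default instance below is
  // only a stand-in so the snippet stays self-contained.
  //
  //   org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto attemptId =
  //       org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
  //   FailTaskAttemptRequestProto request = FailTaskAttemptRequestProto.newBuilder()
  //       .setTaskAttemptId(attemptId)   // also sets the has-bit, so hasTaskAttemptId() is true
  //       .build();
  //   byte[] wire = request.toByteArray();               // serialize
  //   FailTaskAttemptRequestProto parsed =
  //       FailTaskAttemptRequestProto.parseFrom(wire);   // delegates to PARSER
  //   assert parsed.equals(request);                     // equals() compares task_attempt_id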

  public interface FailTaskAttemptResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.mapreduce.FailTaskAttemptResponseProto}
   */
  public static final class FailTaskAttemptResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements FailTaskAttemptResponseProtoOrBuilder {
    // Use FailTaskAttemptResponseProto.newBuilder() to construct.
    private FailTaskAttemptResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private FailTaskAttemptResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final FailTaskAttemptResponseProto defaultInstance;
    public static FailTaskAttemptResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public FailTaskAttemptResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private FailTaskAttemptResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.Builder.class);
    }

    public static com.google.protobuf.Parser<FailTaskAttemptResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<FailTaskAttemptResponseProto>() {
      public FailTaskAttemptResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new FailTaskAttemptResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<FailTaskAttemptResponseProto> getParserForType() {
      return PARSER;
    }

    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.mapreduce.FailTaskAttemptResponseProto}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.class, org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.Builder.class);
      }

      // Construct using org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_descriptor;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.getDefaultInstance();
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto build() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto buildPartial() {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto result = new org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto) {
          return mergeFrom((org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto other) {
        if (other == org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hadoop.mapreduce.FailTaskAttemptResponseProto)
    }

    static {
      defaultInstance = new FailTaskAttemptResponseProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.FailTaskAttemptResponseProto)
  }
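  // --- Editor's note (illustrative sketch, not part of the generated code) ---
  // FailTaskAttemptResponseProto declares no fields, so an instance is an empty
  // message. A sketch of the length-delimited stream pattern supported by the
  // writeDelimitedTo/parseDelimitedFrom pair:
  //
  //   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
  //   FailTaskAttemptResponseProto.getDefaultInstance().writeDelimitedTo(out);
  //   java.io.ByteArrayInputStream in =
  //       new java.io.ByteArrayInputStream(out.toByteArray());
  //   FailTaskAttemptResponseProto response =
  //       FailTaskAttemptResponseProto.parseDelimitedFrom(in);
  //   // getSerializedSize() is 0 here unless unknown fields were carried along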

  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_GetJobReportRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_GetJobReportRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_GetJobReportResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_GetJobReportResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_GetTaskReportRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_GetTaskReportRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_GetTaskReportResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_GetTaskReportResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_GetCountersRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_GetCountersRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_GetCountersResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_GetCountersResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_KillJobRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_KillJobRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_KillJobResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_KillJobResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_KillTaskRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_KillTaskRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_KillTaskResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_KillTaskResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\027mr_service_protos.proto\022\020hadoop.mapred" +
      "uce\032\016Security.proto\032\017mr_protos.proto\032\021ya" +
      "rn_protos.proto\"H\n\030GetJobReportRequestPr" +
      "oto\022,\n\006job_id\030\001 \001(\0132\034.hadoop.mapreduce.J" +
      "obIdProto\"Q\n\031GetJobReportResponseProto\0224" +
      "\n\njob_report\030\001 \001(\0132 .hadoop.mapreduce.Jo" +
      "bReportProto\"K\n\031GetTaskReportRequestProt" +
      "o\022.\n\007task_id\030\001 \001(\0132\035.hadoop.mapreduce.Ta" +
      "skIdProto\"T\n\032GetTaskReportResponseProto\022" +
      "6\n\013task_report\030\001 \001(\0132!.hadoop.mapreduce.",
      "TaskReportProto\"a\n GetTaskAttemptReportR" +
      "equestProto\022=\n\017task_attempt_id\030\001 \001(\0132$.h" +
      "adoop.mapreduce.TaskAttemptIdProto\"j\n!Ge" +
      "tTaskAttemptReportResponseProto\022E\n\023task_" +
      "attempt_report\030\001 \001(\0132(.hadoop.mapreduce." +
      "TaskAttemptReportProto\"G\n\027GetCountersReq" +
      "uestProto\022,\n\006job_id\030\001 \001(\0132\034.hadoop.mapre" +
      "duce.JobIdProto\"M\n\030GetCountersResponsePr" +
      "oto\0221\n\010counters\030\001 \001(\0132\037.hadoop.mapreduce" +
      ".CountersProto\"\205\001\n*GetTaskAttemptComplet",
      "ionEventsRequestProto\022,\n\006job_id\030\001 \001(\0132\034." +
      "hadoop.mapreduce.JobIdProto\022\025\n\rfrom_even" +
      "t_id\030\002 \001(\005\022\022\n\nmax_events\030\003 \001(\005\"{\n+GetTas" +
      "kAttemptCompletionEventsResponseProto\022L\n" +
      "\021completion_events\030\001 \003(\01321.hadoop.mapred" +
      "uce.TaskAttemptCompletionEventProto\"~\n\032G" +
      "etTaskReportsRequestProto\022,\n\006job_id\030\001 \001(" +
      "\0132\034.hadoop.mapreduce.JobIdProto\0222\n\ttask_" +
      "type\030\002 \001(\0162\037.hadoop.mapreduce.TaskTypePr" +
      "oto\"V\n\033GetTaskReportsResponseProto\0227\n\014ta",
      "sk_reports\030\001 \003(\0132!.hadoop.mapreduce.Task" +
      "ReportProto\"[\n\032GetDiagnosticsRequestProt" +
      "o\022=\n\017task_attempt_id\030\001 \001(\0132$.hadoop.mapr" +
      "educe.TaskAttemptIdProto\"2\n\033GetDiagnosti" +
      "csResponseProto\022\023\n\013diagnostics\030\001 \003(\t\"C\n\023" +
      "KillJobRequestProto\022,\n\006job_id\030\001 \001(\0132\034.ha" +
      "doop.mapreduce.JobIdProto\"\026\n\024KillJobResp" +
      "onseProto\"F\n\024KillTaskRequestProto\022.\n\007tas" +
      "k_id\030\001 \001(\0132\035.hadoop.mapreduce.TaskIdProt" +
      "o\"\027\n\025KillTaskResponseProto\"\\\n\033KillTaskAt",
      "temptRequestProto\022=\n\017task_attempt_id\030\001 \001" +
      "(\0132$.hadoop.mapreduce.TaskAttemptIdProto" +
      "\"\036\n\034KillTaskAttemptResponseProto\"\\\n\033Fail" +
      "TaskAttemptRequestProto\022=\n\017task_attempt_" +
      "id\030\001 \001(\0132$.hadoop.mapreduce.TaskAttemptI" +
      "dProto\"\036\n\034FailTaskAttemptResponseProtoB=" +
      "\n$org.apache.hadoop.mapreduce.v2.protoB\017" +
      "MRServiceProtos\210\001\001\240\001\001"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_hadoop_mapreduce_GetJobReportRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_hadoop_mapreduce_GetJobReportRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_GetJobReportRequestProto_descriptor,
              new java.lang.String[] { "JobId", });
          internal_static_hadoop_mapreduce_GetJobReportResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_hadoop_mapreduce_GetJobReportResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_GetJobReportResponseProto_descriptor,
              new java.lang.String[] { "JobReport", });
          internal_static_hadoop_mapreduce_GetTaskReportRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_hadoop_mapreduce_GetTaskReportRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_GetTaskReportRequestProto_descriptor,
              new java.lang.String[] { "TaskId", });
          internal_static_hadoop_mapreduce_GetTaskReportResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_hadoop_mapreduce_GetTaskReportResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_GetTaskReportResponseProto_descriptor,
              new java.lang.String[] { "TaskReport", });
          internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(4);
          internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_GetTaskAttemptReportRequestProto_descriptor,
              new java.lang.String[] { "TaskAttemptId", });
          internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(5);
          internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_GetTaskAttemptReportResponseProto_descriptor,
              new java.lang.String[] { "TaskAttemptReport", });
          internal_static_hadoop_mapreduce_GetCountersRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(6);
          internal_static_hadoop_mapreduce_GetCountersRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_GetCountersRequestProto_descriptor,
              new java.lang.String[] { "JobId", });
          internal_static_hadoop_mapreduce_GetCountersResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(7);
          internal_static_hadoop_mapreduce_GetCountersResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_GetCountersResponseProto_descriptor,
              new java.lang.String[] { "Counters", });
          internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(8);
          internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsRequestProto_descriptor,
              new java.lang.String[] { "JobId", "FromEventId", "MaxEvents", });
          internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(9);
          internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_GetTaskAttemptCompletionEventsResponseProto_descriptor,
              new java.lang.String[] { "CompletionEvents", });
          internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(10);
          internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_GetTaskReportsRequestProto_descriptor,
              new java.lang.String[] { "JobId", "TaskType", });
          internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(11);
          internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_GetTaskReportsResponseProto_descriptor,
              new java.lang.String[] { "TaskReports", });
          internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(12);
          internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_GetDiagnosticsRequestProto_descriptor,
              new java.lang.String[] { "TaskAttemptId", });
          internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(13);
          internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_GetDiagnosticsResponseProto_descriptor,
              new java.lang.String[] { "Diagnostics", });
          internal_static_hadoop_mapreduce_KillJobRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(14);
          internal_static_hadoop_mapreduce_KillJobRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_KillJobRequestProto_descriptor,
              new java.lang.String[] { "JobId", });
          internal_static_hadoop_mapreduce_KillJobResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(15);
          internal_static_hadoop_mapreduce_KillJobResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_KillJobResponseProto_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_mapreduce_KillTaskRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(16);
          internal_static_hadoop_mapreduce_KillTaskRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_KillTaskRequestProto_descriptor,
              new java.lang.String[] { "TaskId", });
          internal_static_hadoop_mapreduce_KillTaskResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(17);
          internal_static_hadoop_mapreduce_KillTaskResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_KillTaskResponseProto_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(18);
          internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_KillTaskAttemptRequestProto_descriptor,
              new java.lang.String[] { "TaskAttemptId", });
          internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(19);
          internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_KillTaskAttemptResponseProto_descriptor,
              new java.lang.String[] { });
          internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(20);
          internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_FailTaskAttemptRequestProto_descriptor,
              new java.lang.String[] { "TaskAttemptId", });
          internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(21);
          internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hadoop_mapreduce_FailTaskAttemptResponseProto_descriptor,
              new java.lang.String[] { });
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.security.proto.SecurityProtos.getDescriptor(),
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.getDescriptor(),
          org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor(),
        }, assigner);
  }
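  // --- Editor's note (illustrative sketch, not part of the generated code) ---
  // The static block above parses the embedded descriptor bytes and wires each
  // message's FieldAccessorTable; the get(n) indexes match declaration order in
  // mr_service_protos.proto. Runtime inspection of that metadata looks like:
  //
  //   com.google.protobuf.Descriptors.FileDescriptor fd = MRServiceProtos.getDescriptor();
  //   for (com.google.protobuf.Descriptors.Descriptor d : fd.getMessageTypes()) {
  //     System.out.println(d.getFullName());   // e.g. "hadoop.mapreduce.KillJobRequestProto"
  //   }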

  // @@protoc_insertion_point(outer_class_scope)
}



