All downloads are free. The search and download functionality uses the official Maven repository.

org.apache.hadoop.yarn.proto.YarnServiceProtos Maven / Gradle / Ivy

There is a newer version: 3.4.0
Show newest version
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: yarn_service_protos.proto

package org.apache.hadoop.yarn.proto;

public final class YarnServiceProtos {
  // Private constructor: this is a static holder class for generated protobuf
  // types and must never be instantiated.
  private YarnServiceProtos() {}
  /**
   * Registers all protobuf extensions declared in this file with the given
   * lite registry. This file declares no extensions, so the method body is
   * intentionally empty; it exists to satisfy the generated-code contract.
   *
   * @param registry the extension registry to populate (unused here)
   */
  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
  }

  /**
   * Registers all protobuf extensions declared in this file with the given
   * full registry by delegating to the lite-registry overload (a full
   * {@code ExtensionRegistry} is a subtype of {@code ExtensionRegistryLite}).
   *
   * @param registry the extension registry to populate
   */
  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
    final org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite liteRegistry =
        (org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry;
    registerAllExtensions(liteRegistry);
  }
  /**
   * Protobuf enum {@code hadoop.yarn.ContainerUpdateTypeProto}.
   *
   * <p>Generated mapping between Java enum constants and protobuf wire
   * numbers for the {@code ContainerUpdateTypeProto} proto enum.
   */
  public enum ContainerUpdateTypeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>INCREASE_RESOURCE = 0;</code>
     */
    INCREASE_RESOURCE(0),
    /**
     * <code>DECREASE_RESOURCE = 1;</code>
     */
    DECREASE_RESOURCE(1),
    /**
     * <code>PROMOTE_EXECUTION_TYPE = 2;</code>
     */
    PROMOTE_EXECUTION_TYPE(2),
    /**
     * <code>DEMOTE_EXECUTION_TYPE = 3;</code>
     */
    DEMOTE_EXECUTION_TYPE(3),
    ;

    /**
     * <code>INCREASE_RESOURCE = 0;</code>
     */
    public static final int INCREASE_RESOURCE_VALUE = 0;
    /**
     * <code>DECREASE_RESOURCE = 1;</code>
     */
    public static final int DECREASE_RESOURCE_VALUE = 1;
    /**
     * <code>PROMOTE_EXECUTION_TYPE = 2;</code>
     */
    public static final int PROMOTE_EXECUTION_TYPE_VALUE = 2;
    /**
     * <code>DEMOTE_EXECUTION_TYPE = 3;</code>
     */
    public static final int DEMOTE_EXECUTION_TYPE_VALUE = 3;


    /**
     * Returns this constant's protobuf wire number.
     */
    public final int getNumber() {
      return value;
    }

    /**
     * @param value the protobuf wire number
     * @return the matching constant, or {@code null} if unknown
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ContainerUpdateTypeProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * Maps a protobuf wire number to the corresponding enum constant.
     *
     * @param value the protobuf wire number
     * @return the matching constant, or {@code null} if the number is unknown
     */
    public static ContainerUpdateTypeProto forNumber(int value) {
      switch (value) {
        case 0: return INCREASE_RESOURCE;
        case 1: return DECREASE_RESOURCE;
        case 2: return PROMOTE_EXECUTION_TYPE;
        case 3: return DEMOTE_EXECUTION_TYPE;
        default: return null;
      }
    }

    // NOTE(review): the scraped source had the generic type arguments stripped
    // (raw EnumLiteMap); restored here per the standard protoc-generated form.
    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerUpdateTypeProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        ContainerUpdateTypeProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerUpdateTypeProto>() {
            public ContainerUpdateTypeProto findValueByNumber(int number) {
              return ContainerUpdateTypeProto.forNumber(number);
            }
          };

    /**
     * Returns the descriptor for this enum value.
     */
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    // This enum is the first (index 0) enum type declared in the proto file.
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.getDescriptor().getEnumTypes().get(0);
    }

    private static final ContainerUpdateTypeProto[] VALUES = values();

    /**
     * Looks up the enum constant for the given value descriptor.
     *
     * @param desc a value descriptor belonging to this enum's type
     * @return the matching constant
     * @throws java.lang.IllegalArgumentException if the descriptor is for a different enum type
     */
    public static ContainerUpdateTypeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    // Protobuf wire number for this constant (distinct from ordinal()).
    private final int value;

    private ContainerUpdateTypeProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ContainerUpdateTypeProto)
  }

  /**
   * Protobuf enum {@code hadoop.yarn.SchedulerResourceTypes}.
   *
   * <p>Generated mapping between Java enum constants and protobuf wire
   * numbers for the {@code SchedulerResourceTypes} proto enum.
   */
  public enum SchedulerResourceTypes
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>MEMORY = 0;</code>
     */
    MEMORY(0),
    /**
     * <code>CPU = 1;</code>
     */
    CPU(1),
    ;

    /**
     * <code>MEMORY = 0;</code>
     */
    public static final int MEMORY_VALUE = 0;
    /**
     * <code>CPU = 1;</code>
     */
    public static final int CPU_VALUE = 1;


    /**
     * Returns this constant's protobuf wire number.
     */
    public final int getNumber() {
      return value;
    }

    /**
     * @param value the protobuf wire number
     * @return the matching constant, or {@code null} if unknown
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static SchedulerResourceTypes valueOf(int value) {
      return forNumber(value);
    }

    /**
     * Maps a protobuf wire number to the corresponding enum constant.
     *
     * @param value the protobuf wire number
     * @return the matching constant, or {@code null} if the number is unknown
     */
    public static SchedulerResourceTypes forNumber(int value) {
      switch (value) {
        case 0: return MEMORY;
        case 1: return CPU;
        default: return null;
      }
    }

    // NOTE(review): the scraped source had the generic type arguments stripped
    // (raw EnumLiteMap); restored here per the standard protoc-generated form.
    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<SchedulerResourceTypes>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        SchedulerResourceTypes> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<SchedulerResourceTypes>() {
            public SchedulerResourceTypes findValueByNumber(int number) {
              return SchedulerResourceTypes.forNumber(number);
            }
          };

    /**
     * Returns the descriptor for this enum value.
     */
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    // This enum is the second (index 1) enum type declared in the proto file.
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.getDescriptor().getEnumTypes().get(1);
    }

    private static final SchedulerResourceTypes[] VALUES = values();

    /**
     * Looks up the enum constant for the given value descriptor.
     *
     * @param desc a value descriptor belonging to this enum's type
     * @return the matching constant
     * @throws java.lang.IllegalArgumentException if the descriptor is for a different enum type
     */
    public static SchedulerResourceTypes valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    // Protobuf wire number for this constant (distinct from ordinal()).
    private final int value;

    private SchedulerResourceTypes(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.SchedulerResourceTypes)
  }

  /**
   * Protobuf enum {@code hadoop.yarn.ApplicationsRequestScopeProto}.
   *
   * <p>Generated mapping between Java enum constants and protobuf wire
   * numbers for the {@code ApplicationsRequestScopeProto} proto enum.
   */
  public enum ApplicationsRequestScopeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>ALL = 0;</code>
     */
    ALL(0),
    /**
     * <code>VIEWABLE = 1;</code>
     */
    VIEWABLE(1),
    /**
     * <code>OWN = 2;</code>
     */
    OWN(2),
    ;

    /**
     * <code>ALL = 0;</code>
     */
    public static final int ALL_VALUE = 0;
    /**
     * <code>VIEWABLE = 1;</code>
     */
    public static final int VIEWABLE_VALUE = 1;
    /**
     * <code>OWN = 2;</code>
     */
    public static final int OWN_VALUE = 2;


    /**
     * Returns this constant's protobuf wire number.
     */
    public final int getNumber() {
      return value;
    }

    /**
     * @param value the protobuf wire number
     * @return the matching constant, or {@code null} if unknown
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ApplicationsRequestScopeProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * Maps a protobuf wire number to the corresponding enum constant.
     *
     * @param value the protobuf wire number
     * @return the matching constant, or {@code null} if the number is unknown
     */
    public static ApplicationsRequestScopeProto forNumber(int value) {
      switch (value) {
        case 0: return ALL;
        case 1: return VIEWABLE;
        case 2: return OWN;
        default: return null;
      }
    }

    // NOTE(review): the scraped source had the generic type arguments stripped
    // (raw EnumLiteMap); restored here per the standard protoc-generated form.
    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ApplicationsRequestScopeProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        ApplicationsRequestScopeProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ApplicationsRequestScopeProto>() {
            public ApplicationsRequestScopeProto findValueByNumber(int number) {
              return ApplicationsRequestScopeProto.forNumber(number);
            }
          };

    /**
     * Returns the descriptor for this enum value.
     */
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    // This enum is the third (index 2) enum type declared in the proto file.
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.getDescriptor().getEnumTypes().get(2);
    }

    private static final ApplicationsRequestScopeProto[] VALUES = values();

    /**
     * Looks up the enum constant for the given value descriptor.
     *
     * @param desc a value descriptor belonging to this enum's type
     * @return the matching constant
     * @throws java.lang.IllegalArgumentException if the descriptor is for a different enum type
     */
    public static ApplicationsRequestScopeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    // Protobuf wire number for this constant (distinct from ordinal()).
    private final int value;

    private ApplicationsRequestScopeProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ApplicationsRequestScopeProto)
  }

  /**
   * Protobuf enum {@code hadoop.yarn.LocalizationStateProto}.
   *
   * <p>Generated mapping between Java enum constants and protobuf wire
   * numbers for the {@code LocalizationStateProto} proto enum. Note that
   * the wire numbers for this enum start at 1, not 0.
   */
  public enum LocalizationStateProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>L_PENDING = 1;</code>
     */
    L_PENDING(1),
    /**
     * <code>L_COMPLETED = 2;</code>
     */
    L_COMPLETED(2),
    /**
     * <code>L_FAILED = 3;</code>
     */
    L_FAILED(3),
    ;

    /**
     * <code>L_PENDING = 1;</code>
     */
    public static final int L_PENDING_VALUE = 1;
    /**
     * <code>L_COMPLETED = 2;</code>
     */
    public static final int L_COMPLETED_VALUE = 2;
    /**
     * <code>L_FAILED = 3;</code>
     */
    public static final int L_FAILED_VALUE = 3;


    /**
     * Returns this constant's protobuf wire number.
     */
    public final int getNumber() {
      return value;
    }

    /**
     * @param value the protobuf wire number
     * @return the matching constant, or {@code null} if unknown
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static LocalizationStateProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * Maps a protobuf wire number to the corresponding enum constant.
     *
     * @param value the protobuf wire number
     * @return the matching constant, or {@code null} if the number is unknown
     */
    public static LocalizationStateProto forNumber(int value) {
      switch (value) {
        case 1: return L_PENDING;
        case 2: return L_COMPLETED;
        case 3: return L_FAILED;
        default: return null;
      }
    }

    // NOTE(review): the scraped source had the generic type arguments stripped
    // (raw EnumLiteMap); restored here per the standard protoc-generated form.
    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LocalizationStateProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        LocalizationStateProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LocalizationStateProto>() {
            public LocalizationStateProto findValueByNumber(int number) {
              return LocalizationStateProto.forNumber(number);
            }
          };

    /**
     * Returns the descriptor for this enum value.
     */
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    // This enum is the fourth (index 3) enum type declared in the proto file.
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.getDescriptor().getEnumTypes().get(3);
    }

    private static final LocalizationStateProto[] VALUES = values();

    /**
     * Looks up the enum constant for the given value descriptor.
     *
     * @param desc a value descriptor belonging to this enum's type
     * @return the matching constant
     * @throws java.lang.IllegalArgumentException if the descriptor is for a different enum type
     */
    public static LocalizationStateProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    // Protobuf wire number for this constant (distinct from ordinal()).
    private final int value;

    private LocalizationStateProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.LocalizationStateProto)
  }

  /**
   * Accessor interface for {@code hadoop.yarn.RegisterApplicationMasterRequestProto},
   * implemented by both the immutable message and its builder.
   */
  public interface RegisterApplicationMasterRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.RegisterApplicationMasterRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string host = 1;</code>
     *
     * @return whether the {@code host} field is set
     */
    boolean hasHost();
    /**
     * <code>optional string host = 1;</code>
     */
    java.lang.String getHost();
    /**
     * <code>optional string host = 1;</code>
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostBytes();

    /**
     * <code>optional int32 rpc_port = 2;</code>
     *
     * @return whether the {@code rpc_port} field is set
     */
    boolean hasRpcPort();
    /**
     * <code>optional int32 rpc_port = 2;</code>
     */
    int getRpcPort();

    /**
     * <code>optional string tracking_url = 3;</code>
     *
     * @return whether the {@code tracking_url} field is set
     */
    boolean hasTrackingUrl();
    /**
     * <code>optional string tracking_url = 3;</code>
     */
    java.lang.String getTrackingUrl();
    /**
     * <code>optional string tracking_url = 3;</code>
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTrackingUrlBytes();

    // NOTE(review): the scraped source had the List type arguments stripped
    // (raw java.util.List); restored here per the standard protoc-generated form.
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto>
        getPlacementConstraintsList();
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto getPlacementConstraints(int index);
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
     */
    int getPlacementConstraintsCount();
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder>
        getPlacementConstraintsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder getPlacementConstraintsOrBuilder(
        int index);
  }
  /**
   * 
   *////////////////////////////////////////////////////
   * ///// AM_RM_Protocol ///////////////////////////////
   * ////////////////////////////////////////////////////
   * 
* * Protobuf type {@code hadoop.yarn.RegisterApplicationMasterRequestProto} */ public static final class RegisterApplicationMasterRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.RegisterApplicationMasterRequestProto) RegisterApplicationMasterRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use RegisterApplicationMasterRequestProto.newBuilder() to construct. private RegisterApplicationMasterRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private RegisterApplicationMasterRequestProto() { host_ = ""; trackingUrl_ = ""; placementConstraints_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RegisterApplicationMasterRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; host_ = bs; break; } case 16: { bitField0_ |= 0x00000002; rpcPort_ = input.readInt32(); break; } case 26: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; trackingUrl_ = bs; break; } case 34: { if (!((mutable_bitField0_ & 0x00000008) != 0)) { placementConstraints_ = new 
java.util.ArrayList(); mutable_bitField0_ |= 0x00000008; } placementConstraints_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000008) != 0)) { placementConstraints_ = java.util.Collections.unmodifiableList(placementConstraints_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto.Builder.class); } private int bitField0_; public static final int HOST_FIELD_NUMBER = 1; private volatile java.lang.Object host_; /** * optional string host = 1; */ public boolean hasHost() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string host = 1; */ public java.lang.String getHost() { java.lang.Object ref = host_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { 
org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { host_ = s; } return s; } } /** * optional string host = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getHostBytes() { java.lang.Object ref = host_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); host_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int RPC_PORT_FIELD_NUMBER = 2; private int rpcPort_; /** * optional int32 rpc_port = 2; */ public boolean hasRpcPort() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int32 rpc_port = 2; */ public int getRpcPort() { return rpcPort_; } public static final int TRACKING_URL_FIELD_NUMBER = 3; private volatile java.lang.Object trackingUrl_; /** * optional string tracking_url = 3; */ public boolean hasTrackingUrl() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string tracking_url = 3; */ public java.lang.String getTrackingUrl() { java.lang.Object ref = trackingUrl_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { trackingUrl_ = s; } return s; } } /** * optional string tracking_url = 3; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getTrackingUrlBytes() { java.lang.Object ref = trackingUrl_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); trackingUrl_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int 
PLACEMENT_CONSTRAINTS_FIELD_NUMBER = 4; private java.util.List placementConstraints_; /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public java.util.List getPlacementConstraintsList() { return placementConstraints_; } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public java.util.List getPlacementConstraintsOrBuilderList() { return placementConstraints_; } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public int getPlacementConstraintsCount() { return placementConstraints_.size(); } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto getPlacementConstraints(int index) { return placementConstraints_.get(index); } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder getPlacementConstraintsOrBuilder( int index) { return placementConstraints_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getPlacementConstraintsCount(); i++) { if (!getPlacementConstraints(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, host_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt32(2, rpcPort_); } if (((bitField0_ & 0x00000004) != 0)) { 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, trackingUrl_); } for (int i = 0; i < placementConstraints_.size(); i++) { output.writeMessage(4, placementConstraints_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, host_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(2, rpcPort_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, trackingUrl_); } for (int i = 0; i < placementConstraints_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(4, placementConstraints_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto) obj; if (hasHost() != other.hasHost()) return false; if (hasHost()) { if (!getHost() .equals(other.getHost())) return false; } if (hasRpcPort() != other.hasRpcPort()) return false; if (hasRpcPort()) { if (getRpcPort() != other.getRpcPort()) return false; } if (hasTrackingUrl() != other.hasTrackingUrl()) return false; if (hasTrackingUrl()) { if (!getTrackingUrl() .equals(other.getTrackingUrl())) return false; } if (!getPlacementConstraintsList() .equals(other.getPlacementConstraintsList())) return false; if 
(!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasHost()) { hash = (37 * hash) + HOST_FIELD_NUMBER; hash = (53 * hash) + getHost().hashCode(); } if (hasRpcPort()) { hash = (37 * hash) + RPC_PORT_FIELD_NUMBER; hash = (53 * hash) + getRpcPort(); } if (hasTrackingUrl()) { hash = (37 * hash) + TRACKING_URL_FIELD_NUMBER; hash = (53 * hash) + getTrackingUrl().hashCode(); } if (getPlacementConstraintsCount() > 0) { hash = (37 * hash) + PLACEMENT_CONSTRAINTS_FIELD_NUMBER; hash = (53 * hash) + getPlacementConstraintsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } 
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** *
     *////////////////////////////////////////////////////
     * ///// AM_RM_Protocol ///////////////////////////////
     * ////////////////////////////////////////////////////
     * 
* * Protobuf type {@code hadoop.yarn.RegisterApplicationMasterRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.RegisterApplicationMasterRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getPlacementConstraintsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); host_ = ""; bitField0_ = (bitField0_ & ~0x00000001); rpcPort_ = 0; bitField0_ = (bitField0_ & ~0x00000002); trackingUrl_ = ""; bitField0_ = (bitField0_ & ~0x00000004); if (placementConstraintsBuilder_ == null) { placementConstraints_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & 
~0x00000008); } else { placementConstraintsBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.host_ = host_; if (((from_bitField0_ & 0x00000002) != 0)) { result.rpcPort_ = rpcPort_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { to_bitField0_ |= 0x00000004; } result.trackingUrl_ = trackingUrl_; if (placementConstraintsBuilder_ == null) { if (((bitField0_ & 0x00000008) != 0)) { placementConstraints_ = java.util.Collections.unmodifiableList(placementConstraints_); bitField0_ = (bitField0_ & ~0x00000008); } result.placementConstraints_ = placementConstraints_; } else { result.placementConstraints_ = placementConstraintsBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); 
return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto.getDefaultInstance()) return this; if (other.hasHost()) { bitField0_ |= 0x00000001; host_ = other.host_; onChanged(); } if (other.hasRpcPort()) { setRpcPort(other.getRpcPort()); } if (other.hasTrackingUrl()) { bitField0_ |= 0x00000004; trackingUrl_ = other.trackingUrl_; onChanged(); } if (placementConstraintsBuilder_ == null) { if (!other.placementConstraints_.isEmpty()) { if (placementConstraints_.isEmpty()) { 
placementConstraints_ = other.placementConstraints_; bitField0_ = (bitField0_ & ~0x00000008); } else { ensurePlacementConstraintsIsMutable(); placementConstraints_.addAll(other.placementConstraints_); } onChanged(); } } else { if (!other.placementConstraints_.isEmpty()) { if (placementConstraintsBuilder_.isEmpty()) { placementConstraintsBuilder_.dispose(); placementConstraintsBuilder_ = null; placementConstraints_ = other.placementConstraints_; bitField0_ = (bitField0_ & ~0x00000008); placementConstraintsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getPlacementConstraintsFieldBuilder() : null; } else { placementConstraintsBuilder_.addAllMessages(other.placementConstraints_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getPlacementConstraintsCount(); i++) { if (!getPlacementConstraints(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object host_ = ""; /** * optional string host = 1; */ public boolean hasHost() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string host = 1; */ public java.lang.String getHost() { java.lang.Object ref = 
host_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { host_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string host = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getHostBytes() { java.lang.Object ref = host_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); host_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string host = 1; */ public Builder setHost( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; host_ = value; onChanged(); return this; } /** * optional string host = 1; */ public Builder clearHost() { bitField0_ = (bitField0_ & ~0x00000001); host_ = getDefaultInstance().getHost(); onChanged(); return this; } /** * optional string host = 1; */ public Builder setHostBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; host_ = value; onChanged(); return this; } private int rpcPort_ ; /** * optional int32 rpc_port = 2; */ public boolean hasRpcPort() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int32 rpc_port = 2; */ public int getRpcPort() { return rpcPort_; } /** * optional int32 rpc_port = 2; */ public Builder setRpcPort(int value) { bitField0_ |= 0x00000002; rpcPort_ = value; onChanged(); return this; } /** * optional int32 rpc_port = 2; */ public Builder clearRpcPort() { bitField0_ = (bitField0_ & ~0x00000002); rpcPort_ = 0; onChanged(); return this; } private java.lang.Object trackingUrl_ = ""; /** * optional string tracking_url = 3; */ public boolean hasTrackingUrl() { return ((bitField0_ & 
0x00000004) != 0); } /** * optional string tracking_url = 3; */ public java.lang.String getTrackingUrl() { java.lang.Object ref = trackingUrl_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { trackingUrl_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string tracking_url = 3; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getTrackingUrlBytes() { java.lang.Object ref = trackingUrl_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); trackingUrl_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string tracking_url = 3; */ public Builder setTrackingUrl( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; trackingUrl_ = value; onChanged(); return this; } /** * optional string tracking_url = 3; */ public Builder clearTrackingUrl() { bitField0_ = (bitField0_ & ~0x00000004); trackingUrl_ = getDefaultInstance().getTrackingUrl(); onChanged(); return this; } /** * optional string tracking_url = 3; */ public Builder setTrackingUrlBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; trackingUrl_ = value; onChanged(); return this; } private java.util.List placementConstraints_ = java.util.Collections.emptyList(); private void ensurePlacementConstraintsIsMutable() { if (!((bitField0_ & 0x00000008) != 0)) { placementConstraints_ = new java.util.ArrayList(placementConstraints_); bitField0_ |= 0x00000008; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto, 
org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder> placementConstraintsBuilder_; /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public java.util.List getPlacementConstraintsList() { if (placementConstraintsBuilder_ == null) { return java.util.Collections.unmodifiableList(placementConstraints_); } else { return placementConstraintsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public int getPlacementConstraintsCount() { if (placementConstraintsBuilder_ == null) { return placementConstraints_.size(); } else { return placementConstraintsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto getPlacementConstraints(int index) { if (placementConstraintsBuilder_ == null) { return placementConstraints_.get(index); } else { return placementConstraintsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public Builder setPlacementConstraints( int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto value) { if (placementConstraintsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePlacementConstraintsIsMutable(); placementConstraints_.set(index, value); onChanged(); } else { placementConstraintsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public Builder setPlacementConstraints( int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder builderForValue) { if (placementConstraintsBuilder_ == null) { ensurePlacementConstraintsIsMutable(); 
placementConstraints_.set(index, builderForValue.build()); onChanged(); } else { placementConstraintsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public Builder addPlacementConstraints(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto value) { if (placementConstraintsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePlacementConstraintsIsMutable(); placementConstraints_.add(value); onChanged(); } else { placementConstraintsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public Builder addPlacementConstraints( int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto value) { if (placementConstraintsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePlacementConstraintsIsMutable(); placementConstraints_.add(index, value); onChanged(); } else { placementConstraintsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public Builder addPlacementConstraints( org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder builderForValue) { if (placementConstraintsBuilder_ == null) { ensurePlacementConstraintsIsMutable(); placementConstraints_.add(builderForValue.build()); onChanged(); } else { placementConstraintsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public Builder addPlacementConstraints( int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder builderForValue) { if (placementConstraintsBuilder_ == null) { ensurePlacementConstraintsIsMutable(); placementConstraints_.add(index, builderForValue.build()); 
onChanged(); } else { placementConstraintsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public Builder addAllPlacementConstraints( java.lang.Iterable values) { if (placementConstraintsBuilder_ == null) { ensurePlacementConstraintsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, placementConstraints_); onChanged(); } else { placementConstraintsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public Builder clearPlacementConstraints() { if (placementConstraintsBuilder_ == null) { placementConstraints_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); } else { placementConstraintsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public Builder removePlacementConstraints(int index) { if (placementConstraintsBuilder_ == null) { ensurePlacementConstraintsIsMutable(); placementConstraints_.remove(index); onChanged(); } else { placementConstraintsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder getPlacementConstraintsBuilder( int index) { return getPlacementConstraintsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder getPlacementConstraintsOrBuilder( int index) { if (placementConstraintsBuilder_ == null) { return placementConstraints_.get(index); } else { return placementConstraintsBuilder_.getMessageOrBuilder(index); } } /** * repeated 
.hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public java.util.List getPlacementConstraintsOrBuilderList() { if (placementConstraintsBuilder_ != null) { return placementConstraintsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(placementConstraints_); } } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder addPlacementConstraintsBuilder() { return getPlacementConstraintsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder addPlacementConstraintsBuilder( int index) { return getPlacementConstraintsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.PlacementConstraintMapEntryProto placement_constraints = 4; */ public java.util.List getPlacementConstraintsBuilderList() { return getPlacementConstraintsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder> getPlacementConstraintsFieldBuilder() { if (placementConstraintsBuilder_ == null) { placementConstraintsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder, 
org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder>( placementConstraints_, ((bitField0_ & 0x00000008) != 0), getParentForChildren(), isClean()); placementConstraints_ = null; } return placementConstraintsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.RegisterApplicationMasterRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.RegisterApplicationMasterRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public RegisterApplicationMasterRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new RegisterApplicationMasterRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public 
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface RegisterApplicationMasterResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.RegisterApplicationMasterResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ResourceProto maximumCapability = 1; */ boolean hasMaximumCapability(); /** * optional .hadoop.yarn.ResourceProto maximumCapability = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMaximumCapability(); /** * optional .hadoop.yarn.ResourceProto maximumCapability = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMaximumCapabilityOrBuilder(); /** * optional bytes client_to_am_token_master_key = 2; */ boolean hasClientToAmTokenMasterKey(); /** * optional bytes client_to_am_token_master_key = 2; */ org.apache.hadoop.thirdparty.protobuf.ByteString getClientToAmTokenMasterKey(); /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ java.util.List getApplicationACLsList(); /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getApplicationACLs(int index); /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ int getApplicationACLsCount(); /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ java.util.List getApplicationACLsOrBuilderList(); /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder getApplicationACLsOrBuilder( int index); /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ java.util.List getContainersFromPreviousAttemptsList(); /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersFromPreviousAttempts(int index); /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ int getContainersFromPreviousAttemptsCount(); /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ java.util.List getContainersFromPreviousAttemptsOrBuilderList(); /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersFromPreviousAttemptsOrBuilder( int index); /** * optional string queue = 5; */ boolean hasQueue(); /** * optional string queue = 5; */ java.lang.String getQueue(); /** * optional string queue = 5; */ org.apache.hadoop.thirdparty.protobuf.ByteString getQueueBytes(); /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ java.util.List getNmTokensFromPreviousAttemptsList(); /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getNmTokensFromPreviousAttempts(int index); /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ int getNmTokensFromPreviousAttemptsCount(); /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ java.util.List getNmTokensFromPreviousAttemptsOrBuilderList(); /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder getNmTokensFromPreviousAttemptsOrBuilder( int index); /** * repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7; */ java.util.List getSchedulerResourceTypesList(); /** * repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7; */ int getSchedulerResourceTypesCount(); /** * repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7; */ 
org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes getSchedulerResourceTypes(int index); /** * optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8; */ boolean hasResourceProfiles(); /** * optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getResourceProfiles(); /** * optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder getResourceProfilesOrBuilder(); /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ java.util.List getResourceTypesList(); /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getResourceTypes(int index); /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ int getResourceTypesCount(); /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ java.util.List getResourceTypesOrBuilderList(); /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder getResourceTypesOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.RegisterApplicationMasterResponseProto} */ public static final class RegisterApplicationMasterResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.RegisterApplicationMasterResponseProto) RegisterApplicationMasterResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use RegisterApplicationMasterResponseProto.newBuilder() to construct. 
private RegisterApplicationMasterResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private RegisterApplicationMasterResponseProto() { clientToAmTokenMasterKey_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY; applicationACLs_ = java.util.Collections.emptyList(); containersFromPreviousAttempts_ = java.util.Collections.emptyList(); queue_ = ""; nmTokensFromPreviousAttempts_ = java.util.Collections.emptyList(); schedulerResourceTypes_ = java.util.Collections.emptyList(); resourceTypes_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RegisterApplicationMasterResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = maximumCapability_.toBuilder(); } maximumCapability_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(maximumCapability_); maximumCapability_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { bitField0_ |= 0x00000002; clientToAmTokenMasterKey_ = input.readBytes(); break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) != 0)) { applicationACLs_ = 
new java.util.ArrayList(); mutable_bitField0_ |= 0x00000004; } applicationACLs_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.PARSER, extensionRegistry)); break; } case 34: { if (!((mutable_bitField0_ & 0x00000008) != 0)) { containersFromPreviousAttempts_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000008; } containersFromPreviousAttempts_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.PARSER, extensionRegistry)); break; } case 42: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; queue_ = bs; break; } case 50: { if (!((mutable_bitField0_ & 0x00000020) != 0)) { nmTokensFromPreviousAttempts_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000020; } nmTokensFromPreviousAttempts_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.PARSER, extensionRegistry)); break; } case 56: { int rawValue = input.readEnum(); @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes value = org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(7, rawValue); } else { if (!((mutable_bitField0_ & 0x00000040) != 0)) { schedulerResourceTypes_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000040; } schedulerResourceTypes_.add(rawValue); } break; } case 58: { int length = input.readRawVarint32(); int oldLimit = input.pushLimit(length); while(input.getBytesUntilLimit() > 0) { int rawValue = input.readEnum(); @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes value = org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(7, rawValue); } else { if (!((mutable_bitField0_ & 0x00000040) != 0)) { schedulerResourceTypes_ = new java.util.ArrayList(); 
mutable_bitField0_ |= 0x00000040; } schedulerResourceTypes_.add(rawValue); } } input.popLimit(oldLimit); break; } case 66: { org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder subBuilder = null; if (((bitField0_ & 0x00000008) != 0)) { subBuilder = resourceProfiles_.toBuilder(); } resourceProfiles_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(resourceProfiles_); resourceProfiles_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000008; break; } case 74: { if (!((mutable_bitField0_ & 0x00000100) != 0)) { resourceTypes_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000100; } resourceTypes_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) != 0)) { applicationACLs_ = java.util.Collections.unmodifiableList(applicationACLs_); } if (((mutable_bitField0_ & 0x00000008) != 0)) { containersFromPreviousAttempts_ = java.util.Collections.unmodifiableList(containersFromPreviousAttempts_); } if (((mutable_bitField0_ & 0x00000020) != 0)) { nmTokensFromPreviousAttempts_ = java.util.Collections.unmodifiableList(nmTokensFromPreviousAttempts_); } if (((mutable_bitField0_ & 0x00000040) != 0)) { schedulerResourceTypes_ = java.util.Collections.unmodifiableList(schedulerResourceTypes_); } if (((mutable_bitField0_ & 0x00000100) != 0)) { resourceTypes_ = java.util.Collections.unmodifiableList(resourceTypes_); } this.unknownFields = unknownFields.build(); 
makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.Builder.class); } private int bitField0_; public static final int MAXIMUMCAPABILITY_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto maximumCapability_; /** * optional .hadoop.yarn.ResourceProto maximumCapability = 1; */ public boolean hasMaximumCapability() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ResourceProto maximumCapability = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMaximumCapability() { return maximumCapability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maximumCapability_; } /** * optional .hadoop.yarn.ResourceProto maximumCapability = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMaximumCapabilityOrBuilder() { return maximumCapability_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maximumCapability_; } public static final int CLIENT_TO_AM_TOKEN_MASTER_KEY_FIELD_NUMBER = 2; private org.apache.hadoop.thirdparty.protobuf.ByteString clientToAmTokenMasterKey_; /** * optional bytes client_to_am_token_master_key = 2; */ public boolean hasClientToAmTokenMasterKey() { return ((bitField0_ & 0x00000002) != 0); } /** * optional bytes client_to_am_token_master_key = 2; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getClientToAmTokenMasterKey() { return clientToAmTokenMasterKey_; } public static final int APPLICATION_ACLS_FIELD_NUMBER = 3; private java.util.List applicationACLs_; /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public java.util.List getApplicationACLsList() { return applicationACLs_; } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public java.util.List getApplicationACLsOrBuilderList() { return applicationACLs_; } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public int getApplicationACLsCount() { return applicationACLs_.size(); } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getApplicationACLs(int index) { return applicationACLs_.get(index); } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder getApplicationACLsOrBuilder( int index) { return applicationACLs_.get(index); } public static final int CONTAINERS_FROM_PREVIOUS_ATTEMPTS_FIELD_NUMBER = 4; private java.util.List containersFromPreviousAttempts_; /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public java.util.List getContainersFromPreviousAttemptsList() { return containersFromPreviousAttempts_; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts 
= 4; */
  public java.util.List getContainersFromPreviousAttemptsOrBuilderList() {
    return containersFromPreviousAttempts_;
  }
  /**
   * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;
   */
  public int getContainersFromPreviousAttemptsCount() {
    return containersFromPreviousAttempts_.size();
  }
  /**
   * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;
   */
  public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersFromPreviousAttempts(int index) {
    return containersFromPreviousAttempts_.get(index);
  }
  /**
   * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4;
   */
  public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersFromPreviousAttemptsOrBuilder(
      int index) {
    return containersFromPreviousAttempts_.get(index);
  }
  // ---- Field 5: queue (optional string) ----
  // Stored as Object so the slot can hold either a decoded String or the raw
  // ByteString; presence is tracked by bit 0x00000004 of bitField0_.
  public static final int QUEUE_FIELD_NUMBER = 5;
  private volatile java.lang.Object queue_;
  /**
   * optional string queue = 5;
   */
  public boolean hasQueue() {
    return ((bitField0_ & 0x00000004) != 0);
  }
  /**
   * optional string queue = 5;
   */
  public java.lang.String getQueue() {
    java.lang.Object ref = queue_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      org.apache.hadoop.thirdparty.protobuf.ByteString bs =
          (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String only when the bytes are valid UTF-8.
      if (bs.isValidUtf8()) {
        queue_ = s;
      }
      return s;
    }
  }
  /**
   * optional string queue = 5;
   */
  public org.apache.hadoop.thirdparty.protobuf.ByteString getQueueBytes() {
    java.lang.Object ref = queue_;
    if (ref instanceof java.lang.String) {
      org.apache.hadoop.thirdparty.protobuf.ByteString b =
          org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      // Cache the encoded ByteString for subsequent calls.
      queue_ = b;
      return b;
    } else {
      return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
    }
  }
  // ---- Field 6: nm_tokens_from_previous_attempts (repeated message) ----
  public static final int NM_TOKENS_FROM_PREVIOUS_ATTEMPTS_FIELD_NUMBER = 6;
  private java.util.List nmTokensFromPreviousAttempts_;
  /**
   * repeated .hadoop.yarn.NMTokenProto
nm_tokens_from_previous_attempts = 6; */
  public java.util.List getNmTokensFromPreviousAttemptsList() {
    return nmTokensFromPreviousAttempts_;
  }
  /**
   * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;
   */
  public java.util.List getNmTokensFromPreviousAttemptsOrBuilderList() {
    return nmTokensFromPreviousAttempts_;
  }
  /**
   * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;
   */
  public int getNmTokensFromPreviousAttemptsCount() {
    return nmTokensFromPreviousAttempts_.size();
  }
  /**
   * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;
   */
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getNmTokensFromPreviousAttempts(int index) {
    return nmTokensFromPreviousAttempts_.get(index);
  }
  /**
   * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;
   */
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder getNmTokensFromPreviousAttemptsOrBuilder(
      int index) {
    return nmTokensFromPreviousAttempts_.get(index);
  }
  // ---- Field 7: scheduler_resource_types (repeated enum) ----
  // Stored as a list of raw enum wire numbers; the converter below maps each
  // number to its SchedulerResourceTypes constant, substituting MEMORY when
  // valueOf() yields null (unknown number).
  public static final int SCHEDULER_RESOURCE_TYPES_FIELD_NUMBER = 7;
  private java.util.List schedulerResourceTypes_;
  private static final org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter.Converter<
      java.lang.Integer, org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes> schedulerResourceTypes_converter_ =
          new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter.Converter<
              java.lang.Integer, org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes>() {
            public org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes convert(java.lang.Integer from) {
              @SuppressWarnings("deprecation")
              org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes result = org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes.valueOf(from);
              return result == null ?
org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes.MEMORY : result;
            }
          };
  /**
   * repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7;
   */
  public java.util.List getSchedulerResourceTypesList() {
    // Lazy view over the raw Integer list; elements are converted on access.
    return new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter<
        java.lang.Integer, org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes>(schedulerResourceTypes_, schedulerResourceTypes_converter_);
  }
  /**
   * repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7;
   */
  public int getSchedulerResourceTypesCount() {
    return schedulerResourceTypes_.size();
  }
  /**
   * repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7;
   */
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes getSchedulerResourceTypes(int index) {
    return schedulerResourceTypes_converter_.convert(schedulerResourceTypes_.get(index));
  }
  // ---- Field 8: resource_profiles (optional message) ----
  // Presence is tracked by bit 0x00000008 of bitField0_; a null reference means
  // "not set" and the getters substitute the type's default instance.
  public static final int RESOURCE_PROFILES_FIELD_NUMBER = 8;
  private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto resourceProfiles_;
  /**
   * optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;
   */
  public boolean hasResourceProfiles() {
    return ((bitField0_ & 0x00000008) != 0);
  }
  /**
   * optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;
   */
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getResourceProfiles() {
    return resourceProfiles_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance() : resourceProfiles_;
  }
  /**
   * optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8;
   */
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder getResourceProfilesOrBuilder() {
    return resourceProfiles_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance() : resourceProfiles_; }
  // ---- Field 9: resource_types (repeated message) ----
  public static final int RESOURCE_TYPES_FIELD_NUMBER = 9;
  private java.util.List resourceTypes_;
  /**
   * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;
   */
  public java.util.List getResourceTypesList() {
    return resourceTypes_;
  }
  /**
   * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;
   */
  public java.util.List getResourceTypesOrBuilderList() {
    return resourceTypes_;
  }
  /**
   * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;
   */
  public int getResourceTypesCount() {
    return resourceTypes_.size();
  }
  /**
   * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;
   */
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getResourceTypes(int index) {
    return resourceTypes_.get(index);
  }
  /**
   * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9;
   */
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder getResourceTypesOrBuilder(
      int index) {
    return resourceTypes_.get(index);
  }
  // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // Fast path: reuse the cached answer when one has been computed.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // The message is initialized only if every present optional sub-message and
    // every element of the repeated message fields is itself initialized.
    if (hasMaximumCapability()) {
      if (!getMaximumCapability().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
    }
    for (int i = 0; i < getContainersFromPreviousAttemptsCount(); i++) {
      if (!getContainersFromPreviousAttempts(i).isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
    }
    for (int i = 0; i < getNmTokensFromPreviousAttemptsCount(); i++) {
      if (!getNmTokensFromPreviousAttempts(i).isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
    }
    if (hasResourceProfiles()) {
      if (!getResourceProfiles().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
    }
    for (int i = 0; i < getResourceTypesCount(); i++) {
      if (!getResourceTypes(i).isInitialized()) {
memoizedIsInitialized = 0; return false; } }
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes the set fields to {@code output} in ascending field-number order
   * (1..9), then appends any unknown fields preserved from parsing.
   */
  @java.lang.Override
  public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getMaximumCapability());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeBytes(2, clientToAmTokenMasterKey_);
    }
    for (int i = 0; i < applicationACLs_.size(); i++) {
      output.writeMessage(3, applicationACLs_.get(i));
    }
    for (int i = 0; i < containersFromPreviousAttempts_.size(); i++) {
      output.writeMessage(4, containersFromPreviousAttempts_.get(i));
    }
    if (((bitField0_ & 0x00000004) != 0)) {
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, queue_);
    }
    for (int i = 0; i < nmTokensFromPreviousAttempts_.size(); i++) {
      output.writeMessage(6, nmTokensFromPreviousAttempts_.get(i));
    }
    for (int i = 0; i < schedulerResourceTypes_.size(); i++) {
      output.writeEnum(7, schedulerResourceTypes_.get(i));
    }
    if (((bitField0_ & 0x00000008) != 0)) {
      output.writeMessage(8, getResourceProfiles());
    }
    for (int i = 0; i < resourceTypes_.size(); i++) {
      output.writeMessage(9, resourceTypes_.get(i));
    }
    unknownFields.writeTo(output);
  }
  /**
   * Computes the serialized byte size of this message and memoizes it in
   * {@code memoizedSize}; -1 marks "not yet computed". Mirrors writeTo().
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeMessageSize(1, getMaximumCapability());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeBytesSize(2, clientToAmTokenMasterKey_);
    }
    for (int i = 0; i < applicationACLs_.size(); i++) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeMessageSize(3, applicationACLs_.get(i));
    }
    for (int i = 0; i < containersFromPreviousAttempts_.size(); i++) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeMessageSize(4,
containersFromPreviousAttempts_.get(i)); }
    if (((bitField0_ & 0x00000004) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, queue_);
    }
    for (int i = 0; i < nmTokensFromPreviousAttempts_.size(); i++) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeMessageSize(6, nmTokensFromPreviousAttempts_.get(i));
    }
    {
      // Repeated enum: sum the varint payload sizes, then add one tag byte per element.
      int dataSize = 0;
      for (int i = 0; i < schedulerResourceTypes_.size(); i++) {
        dataSize += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSizeNoTag(schedulerResourceTypes_.get(i));
      }
      size += dataSize;
      size += 1 * schedulerResourceTypes_.size();
    }
    if (((bitField0_ & 0x00000008) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeMessageSize(8, getResourceProfiles());
    }
    for (int i = 0; i < resourceTypes_.size(); i++) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeMessageSize(9, resourceTypes_.get(i));
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Field-by-field structural equality: optional fields must agree on both
   * presence and value, repeated fields on full list contents, and the
   * preserved unknown fields must match as well.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto)) {
      return super.equals(obj);
    }
    org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto) obj;
    if (hasMaximumCapability() != other.hasMaximumCapability()) return false;
    if (hasMaximumCapability()) {
      if (!getMaximumCapability()
          .equals(other.getMaximumCapability())) return false;
    }
    if (hasClientToAmTokenMasterKey() != other.hasClientToAmTokenMasterKey()) return false;
    if (hasClientToAmTokenMasterKey()) {
      if (!getClientToAmTokenMasterKey()
          .equals(other.getClientToAmTokenMasterKey())) return false;
    }
    if (!getApplicationACLsList()
        .equals(other.getApplicationACLsList())) return false;
    if
(!getContainersFromPreviousAttemptsList()
        .equals(other.getContainersFromPreviousAttemptsList())) return false;
    if (hasQueue() != other.hasQueue()) return false;
    if (hasQueue()) {
      if (!getQueue()
          .equals(other.getQueue())) return false;
    }
    if (!getNmTokensFromPreviousAttemptsList()
        .equals(other.getNmTokensFromPreviousAttemptsList())) return false;
    // The enum list is compared via the underlying raw Integer list.
    if (!schedulerResourceTypes_.equals(other.schedulerResourceTypes_)) return false;
    if (hasResourceProfiles() != other.hasResourceProfiles()) return false;
    if (hasResourceProfiles()) {
      if (!getResourceProfiles()
          .equals(other.getResourceProfiles())) return false;
    }
    if (!getResourceTypesList()
        .equals(other.getResourceTypesList())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  /**
   * Hash code consistent with equals(): folds in each set (or non-empty) field,
   * keyed by its field number, and memoizes the result (0 = not yet computed).
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasMaximumCapability()) {
      hash = (37 * hash) + MAXIMUMCAPABILITY_FIELD_NUMBER;
      hash = (53 * hash) + getMaximumCapability().hashCode();
    }
    if (hasClientToAmTokenMasterKey()) {
      hash = (37 * hash) + CLIENT_TO_AM_TOKEN_MASTER_KEY_FIELD_NUMBER;
      hash = (53 * hash) + getClientToAmTokenMasterKey().hashCode();
    }
    if (getApplicationACLsCount() > 0) {
      hash = (37 * hash) + APPLICATION_ACLS_FIELD_NUMBER;
      hash = (53 * hash) + getApplicationACLsList().hashCode();
    }
    if (getContainersFromPreviousAttemptsCount() > 0) {
      hash = (37 * hash) + CONTAINERS_FROM_PREVIOUS_ATTEMPTS_FIELD_NUMBER;
      hash = (53 * hash) + getContainersFromPreviousAttemptsList().hashCode();
    }
    if (hasQueue()) {
      hash = (37 * hash) + QUEUE_FIELD_NUMBER;
      hash = (53 * hash) + getQueue().hashCode();
    }
    if (getNmTokensFromPreviousAttemptsCount() > 0) {
      hash = (37 * hash) + NM_TOKENS_FROM_PREVIOUS_ATTEMPTS_FIELD_NUMBER;
      hash = (53 * hash) + getNmTokensFromPreviousAttemptsList().hashCode();
    }
    if (getSchedulerResourceTypesCount() > 0) {
      hash = (37 * hash) + SCHEDULER_RESOURCE_TYPES_FIELD_NUMBER;
      hash = (53
* hash) + schedulerResourceTypes_.hashCode(); }
    if (hasResourceProfiles()) {
      hash = (37 * hash) + RESOURCE_PROFILES_FIELD_NUMBER;
      hash = (53 * hash) + getResourceProfiles().hashCode();
    }
    if (getResourceTypesCount() > 0) {
      hash = (37 * hash) + RESOURCE_TYPES_FIELD_NUMBER;
      hash = (53 * hash) + getResourceTypesList().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---- Static parse helpers: thin wrappers over PARSER for each input kind. ----
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(
      java.nio.ByteBuffer data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(
      java.nio.ByteBuffer data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(byte[] data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto
parseFrom(
      byte[] data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream variants delegate to GeneratedMessageV3 helpers that translate parser
  // failures into IOException.
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants delegate to parseDelimitedWithIOException.
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // ---- Builder factories ----
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Returns a new builder pre-populated with {@code prototype}'s fields. */
  public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; any other instance is merged in.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hadoop.yarn.RegisterApplicationMasterResponseProto}
   */
  public static final class Builder extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
      // @@protoc_insertion_point(builder_implements:hadoop.yarn.RegisterApplicationMasterResponseProto)
      org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProtoOrBuilder {
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_descriptor;
    }
    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getMaximumCapabilityFieldBuilder(); getApplicationACLsFieldBuilder(); getContainersFromPreviousAttemptsFieldBuilder(); getNmTokensFromPreviousAttemptsFieldBuilder(); getResourceProfilesFieldBuilder(); getResourceTypesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (maximumCapabilityBuilder_ == null) { maximumCapability_ = null; } else { maximumCapabilityBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); clientToAmTokenMasterKey_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); if (applicationACLsBuilder_ == null) { applicationACLs_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); } else { applicationACLsBuilder_.clear(); } if (containersFromPreviousAttemptsBuilder_ == null) { containersFromPreviousAttempts_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); } else { containersFromPreviousAttemptsBuilder_.clear(); } queue_ = ""; bitField0_ = (bitField0_ & ~0x00000010); if (nmTokensFromPreviousAttemptsBuilder_ == null) { nmTokensFromPreviousAttempts_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000020); } else { nmTokensFromPreviousAttemptsBuilder_.clear(); } schedulerResourceTypes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & 
~0x00000040); if (resourceProfilesBuilder_ == null) { resourceProfiles_ = null; } else { resourceProfilesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000080); if (resourceTypesBuilder_ == null) { resourceTypes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000100); } else { resourceTypesBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (maximumCapabilityBuilder_ == null) { result.maximumCapability_ = maximumCapability_; } else { result.maximumCapability_ = maximumCapabilityBuilder_.build(); } to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { to_bitField0_ |= 0x00000002; } result.clientToAmTokenMasterKey_ = clientToAmTokenMasterKey_; if (applicationACLsBuilder_ == null) { if 
(((bitField0_ & 0x00000004) != 0)) { applicationACLs_ = java.util.Collections.unmodifiableList(applicationACLs_); bitField0_ = (bitField0_ & ~0x00000004); } result.applicationACLs_ = applicationACLs_; } else { result.applicationACLs_ = applicationACLsBuilder_.build(); } if (containersFromPreviousAttemptsBuilder_ == null) { if (((bitField0_ & 0x00000008) != 0)) { containersFromPreviousAttempts_ = java.util.Collections.unmodifiableList(containersFromPreviousAttempts_); bitField0_ = (bitField0_ & ~0x00000008); } result.containersFromPreviousAttempts_ = containersFromPreviousAttempts_; } else { result.containersFromPreviousAttempts_ = containersFromPreviousAttemptsBuilder_.build(); } if (((from_bitField0_ & 0x00000010) != 0)) { to_bitField0_ |= 0x00000004; } result.queue_ = queue_; if (nmTokensFromPreviousAttemptsBuilder_ == null) { if (((bitField0_ & 0x00000020) != 0)) { nmTokensFromPreviousAttempts_ = java.util.Collections.unmodifiableList(nmTokensFromPreviousAttempts_); bitField0_ = (bitField0_ & ~0x00000020); } result.nmTokensFromPreviousAttempts_ = nmTokensFromPreviousAttempts_; } else { result.nmTokensFromPreviousAttempts_ = nmTokensFromPreviousAttemptsBuilder_.build(); } if (((bitField0_ & 0x00000040) != 0)) { schedulerResourceTypes_ = java.util.Collections.unmodifiableList(schedulerResourceTypes_); bitField0_ = (bitField0_ & ~0x00000040); } result.schedulerResourceTypes_ = schedulerResourceTypes_; if (((from_bitField0_ & 0x00000080) != 0)) { if (resourceProfilesBuilder_ == null) { result.resourceProfiles_ = resourceProfiles_; } else { result.resourceProfiles_ = resourceProfilesBuilder_.build(); } to_bitField0_ |= 0x00000008; } if (resourceTypesBuilder_ == null) { if (((bitField0_ & 0x00000100) != 0)) { resourceTypes_ = java.util.Collections.unmodifiableList(resourceTypes_); bitField0_ = (bitField0_ & ~0x00000100); } result.resourceTypes_ = resourceTypes_; } else { result.resourceTypes_ = resourceTypesBuilder_.build(); } result.bitField0_ = to_bitField0_; 
onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.getDefaultInstance()) return this; if (other.hasMaximumCapability()) { mergeMaximumCapability(other.getMaximumCapability()); } if (other.hasClientToAmTokenMasterKey()) { setClientToAmTokenMasterKey(other.getClientToAmTokenMasterKey()); } if (applicationACLsBuilder_ == null) { if (!other.applicationACLs_.isEmpty()) { if (applicationACLs_.isEmpty()) { applicationACLs_ = other.applicationACLs_; 
bitField0_ = (bitField0_ & ~0x00000004); } else { ensureApplicationACLsIsMutable(); applicationACLs_.addAll(other.applicationACLs_); } onChanged(); } } else { if (!other.applicationACLs_.isEmpty()) { if (applicationACLsBuilder_.isEmpty()) { applicationACLsBuilder_.dispose(); applicationACLsBuilder_ = null; applicationACLs_ = other.applicationACLs_; bitField0_ = (bitField0_ & ~0x00000004); applicationACLsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getApplicationACLsFieldBuilder() : null; } else { applicationACLsBuilder_.addAllMessages(other.applicationACLs_); } } } if (containersFromPreviousAttemptsBuilder_ == null) { if (!other.containersFromPreviousAttempts_.isEmpty()) { if (containersFromPreviousAttempts_.isEmpty()) { containersFromPreviousAttempts_ = other.containersFromPreviousAttempts_; bitField0_ = (bitField0_ & ~0x00000008); } else { ensureContainersFromPreviousAttemptsIsMutable(); containersFromPreviousAttempts_.addAll(other.containersFromPreviousAttempts_); } onChanged(); } } else { if (!other.containersFromPreviousAttempts_.isEmpty()) { if (containersFromPreviousAttemptsBuilder_.isEmpty()) { containersFromPreviousAttemptsBuilder_.dispose(); containersFromPreviousAttemptsBuilder_ = null; containersFromPreviousAttempts_ = other.containersFromPreviousAttempts_; bitField0_ = (bitField0_ & ~0x00000008); containersFromPreviousAttemptsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getContainersFromPreviousAttemptsFieldBuilder() : null; } else { containersFromPreviousAttemptsBuilder_.addAllMessages(other.containersFromPreviousAttempts_); } } } if (other.hasQueue()) { bitField0_ |= 0x00000010; queue_ = other.queue_; onChanged(); } if (nmTokensFromPreviousAttemptsBuilder_ == null) { if (!other.nmTokensFromPreviousAttempts_.isEmpty()) { if (nmTokensFromPreviousAttempts_.isEmpty()) { nmTokensFromPreviousAttempts_ = other.nmTokensFromPreviousAttempts_; bitField0_ = (bitField0_ & ~0x00000020); } else { ensureNmTokensFromPreviousAttemptsIsMutable(); nmTokensFromPreviousAttempts_.addAll(other.nmTokensFromPreviousAttempts_); } onChanged(); } } else { if (!other.nmTokensFromPreviousAttempts_.isEmpty()) { if (nmTokensFromPreviousAttemptsBuilder_.isEmpty()) { nmTokensFromPreviousAttemptsBuilder_.dispose(); nmTokensFromPreviousAttemptsBuilder_ = null; nmTokensFromPreviousAttempts_ = other.nmTokensFromPreviousAttempts_; bitField0_ = (bitField0_ & ~0x00000020); nmTokensFromPreviousAttemptsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getNmTokensFromPreviousAttemptsFieldBuilder() : null; } else { nmTokensFromPreviousAttemptsBuilder_.addAllMessages(other.nmTokensFromPreviousAttempts_); } } } if (!other.schedulerResourceTypes_.isEmpty()) { if (schedulerResourceTypes_.isEmpty()) { schedulerResourceTypes_ = other.schedulerResourceTypes_; bitField0_ = (bitField0_ & ~0x00000040); } else { ensureSchedulerResourceTypesIsMutable(); schedulerResourceTypes_.addAll(other.schedulerResourceTypes_); } onChanged(); } if (other.hasResourceProfiles()) { mergeResourceProfiles(other.getResourceProfiles()); } if (resourceTypesBuilder_ == null) { if (!other.resourceTypes_.isEmpty()) { if (resourceTypes_.isEmpty()) { resourceTypes_ = other.resourceTypes_; bitField0_ = (bitField0_ & ~0x00000100); } else { ensureResourceTypesIsMutable(); resourceTypes_.addAll(other.resourceTypes_); } onChanged(); } } else { if (!other.resourceTypes_.isEmpty()) { if (resourceTypesBuilder_.isEmpty()) { resourceTypesBuilder_.dispose(); resourceTypesBuilder_ = null; resourceTypes_ = other.resourceTypes_; bitField0_ = (bitField0_ & ~0x00000100); resourceTypesBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getResourceTypesFieldBuilder() : null; } else { resourceTypesBuilder_.addAllMessages(other.resourceTypes_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasMaximumCapability()) { if (!getMaximumCapability().isInitialized()) { return false; } } for (int i = 0; i < getContainersFromPreviousAttemptsCount(); i++) { if (!getContainersFromPreviousAttempts(i).isInitialized()) { return false; } } for (int i = 0; i < getNmTokensFromPreviousAttemptsCount(); i++) { if (!getNmTokensFromPreviousAttempts(i).isInitialized()) { return false; } } if (hasResourceProfiles()) { if (!getResourceProfiles().isInitialized()) { return false; } } for (int i = 0; i < getResourceTypesCount(); i++) { if (!getResourceTypes(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto maximumCapability_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> maximumCapabilityBuilder_; /** * optional 
.hadoop.yarn.ResourceProto maximumCapability = 1; */
// ---------------------------------------------------------------------------
// Accessors for the optional message field `maximumCapability` (tag 1).
// Standard generated single-field-builder pattern: the value lives in the
// plain field `maximumCapability_` until a nested builder is requested via
// getMaximumCapabilityFieldBuilder(), after which `maximumCapabilityBuilder_`
// owns it and `maximumCapability_` is nulled out.  Presence is tracked by bit
// 0x00000001 of `bitField0_`.
// ---------------------------------------------------------------------------
public boolean hasMaximumCapability() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 * optional .hadoop.yarn.ResourceProto maximumCapability = 1;
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMaximumCapability() {
  if (maximumCapabilityBuilder_ == null) {
    // No nested builder yet: return the stored value, or the immutable
    // default instance when the field has never been set.
    return maximumCapability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maximumCapability_;
  } else {
    return maximumCapabilityBuilder_.getMessage();
  }
}
/**
 * optional .hadoop.yarn.ResourceProto maximumCapability = 1;
 */
public Builder setMaximumCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
  if (maximumCapabilityBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    maximumCapability_ = value;
    onChanged();
  } else {
    maximumCapabilityBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
 * optional .hadoop.yarn.ResourceProto maximumCapability = 1;
 */
public Builder setMaximumCapability(
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
  if (maximumCapabilityBuilder_ == null) {
    maximumCapability_ = builderForValue.build();
    onChanged();
  } else {
    maximumCapabilityBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
 * optional .hadoop.yarn.ResourceProto maximumCapability = 1;
 */
public Builder mergeMaximumCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
  if (maximumCapabilityBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0) &&
        maximumCapability_ != null &&
        maximumCapability_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
      // Field already holds a non-default value: proto-merge the incoming
      // message into it rather than overwriting.
      maximumCapability_ =
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.newBuilder(maximumCapability_).mergeFrom(value).buildPartial();
    } else {
      maximumCapability_ = value;
    }
    onChanged();
  } else {
    maximumCapabilityBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
 * optional .hadoop.yarn.ResourceProto maximumCapability = 1;
 */
public Builder clearMaximumCapability() {
  if (maximumCapabilityBuilder_ == null) {
    maximumCapability_ = null;
    onChanged();
  } else {
    maximumCapabilityBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  return this;
}
/**
 * optional .hadoop.yarn.ResourceProto maximumCapability = 1;
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getMaximumCapabilityBuilder() {
  // Handing out the nested builder marks the field as set.
  bitField0_ |= 0x00000001;
  onChanged();
  return getMaximumCapabilityFieldBuilder().getBuilder();
}
/**
 * optional .hadoop.yarn.ResourceProto maximumCapability = 1;
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMaximumCapabilityOrBuilder() {
  if (maximumCapabilityBuilder_ != null) {
    return maximumCapabilityBuilder_.getMessageOrBuilder();
  } else {
    return maximumCapability_ == null ?
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maximumCapability_;
  }
}
/**
 * optional .hadoop.yarn.ResourceProto maximumCapability = 1;
 */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
    getMaximumCapabilityFieldBuilder() {
  if (maximumCapabilityBuilder_ == null) {
    // Lazily create the nested builder, seeding it with the current value and
    // transferring ownership away from the plain field.
    maximumCapabilityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
            getMaximumCapability(),
            getParentForChildren(),
            isClean());
    maximumCapability_ = null;
  }
  return maximumCapabilityBuilder_;
}

// Backing storage for `client_to_am_token_master_key` (field 2); defaults to
// the empty ByteString.
private org.apache.hadoop.thirdparty.protobuf.ByteString clientToAmTokenMasterKey_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
 * optional bytes client_to_am_token_master_key = 2;
 */
public boolean
hasClientToAmTokenMasterKey() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 * optional bytes client_to_am_token_master_key = 2;
 */
public org.apache.hadoop.thirdparty.protobuf.ByteString getClientToAmTokenMasterKey() {
  return clientToAmTokenMasterKey_;
}
/**
 * optional bytes client_to_am_token_master_key = 2;
 */
public Builder setClientToAmTokenMasterKey(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
  clientToAmTokenMasterKey_ = value;
  onChanged();
  return this;
}
/**
 * optional bytes client_to_am_token_master_key = 2;
 */
public Builder clearClientToAmTokenMasterKey() {
  bitField0_ = (bitField0_ & ~0x00000002);
  // Reset to the default instance's value (the empty ByteString).
  clientToAmTokenMasterKey_ = getDefaultInstance().getClientToAmTokenMasterKey();
  onChanged();
  return this;
}

// Backing list for the repeated field `application_ACLs` (field 3).
// NOTE(review): generic type parameters appear to have been stripped from this
// copy of the generated source (raw java.util.List / java.util.ArrayList) --
// presumably List<ApplicationACLMapProto>; confirm against the original
// generated file before relying on the raw types.
private java.util.List applicationACLs_ =
  java.util.Collections.emptyList();
// Copy-on-write guard: bit 0x00000004 of bitField0_ records whether
// applicationACLs_ is already a private mutable copy; a copy is made here
// before the first in-place mutation.
private void ensureApplicationACLsIsMutable() {
  if (!((bitField0_ & 0x00000004) != 0)) {
    applicationACLs_ = new java.util.ArrayList(applicationACLs_);
    bitField0_ |= 0x00000004;
  }
}

// Lazily-created repeated-field builder; once non-null it owns the list and
// applicationACLs_ is no longer authoritative.
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> applicationACLsBuilder_;

/**
 * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;
 */
public java.util.List getApplicationACLsList() {
  if (applicationACLsBuilder_ == null) {
    // Expose an unmodifiable view so callers cannot bypass copy-on-write.
    return java.util.Collections.unmodifiableList(applicationACLs_);
  } else {
    return applicationACLsBuilder_.getMessageList();
  }
}
/**
 * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;
 */
public int getApplicationACLsCount() {
  if (applicationACLsBuilder_ == null) {
    return applicationACLs_.size();
  } else {
    return applicationACLsBuilder_.getCount();
  }
}
/**
 * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3;
 */
public
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getApplicationACLs(int index) { if (applicationACLsBuilder_ == null) { return applicationACLs_.get(index); } else { return applicationACLsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public Builder setApplicationACLs( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto value) { if (applicationACLsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationACLsIsMutable(); applicationACLs_.set(index, value); onChanged(); } else { applicationACLsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public Builder setApplicationACLs( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder builderForValue) { if (applicationACLsBuilder_ == null) { ensureApplicationACLsIsMutable(); applicationACLs_.set(index, builderForValue.build()); onChanged(); } else { applicationACLsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public Builder addApplicationACLs(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto value) { if (applicationACLsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationACLsIsMutable(); applicationACLs_.add(value); onChanged(); } else { applicationACLsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public Builder addApplicationACLs( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto value) { if (applicationACLsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationACLsIsMutable(); applicationACLs_.add(index, value); onChanged(); } else { applicationACLsBuilder_.addMessage(index, value); } return this; } 
/** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public Builder addApplicationACLs( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder builderForValue) { if (applicationACLsBuilder_ == null) { ensureApplicationACLsIsMutable(); applicationACLs_.add(builderForValue.build()); onChanged(); } else { applicationACLsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public Builder addApplicationACLs( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder builderForValue) { if (applicationACLsBuilder_ == null) { ensureApplicationACLsIsMutable(); applicationACLs_.add(index, builderForValue.build()); onChanged(); } else { applicationACLsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public Builder addAllApplicationACLs( java.lang.Iterable values) { if (applicationACLsBuilder_ == null) { ensureApplicationACLsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, applicationACLs_); onChanged(); } else { applicationACLsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public Builder clearApplicationACLs() { if (applicationACLsBuilder_ == null) { applicationACLs_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { applicationACLsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public Builder removeApplicationACLs(int index) { if (applicationACLsBuilder_ == null) { ensureApplicationACLsIsMutable(); applicationACLs_.remove(index); onChanged(); } else { applicationACLsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder getApplicationACLsBuilder( int index) { return getApplicationACLsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder getApplicationACLsOrBuilder( int index) { if (applicationACLsBuilder_ == null) { return applicationACLs_.get(index); } else { return applicationACLsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public java.util.List getApplicationACLsOrBuilderList() { if (applicationACLsBuilder_ != null) { return applicationACLsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(applicationACLs_); } } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder addApplicationACLsBuilder() { return getApplicationACLsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder addApplicationACLsBuilder( int index) { return getApplicationACLsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 3; */ public java.util.List getApplicationACLsBuilderList() { return getApplicationACLsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> getApplicationACLsFieldBuilder() { if 
(applicationACLsBuilder_ == null) { applicationACLsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder>( applicationACLs_, ((bitField0_ & 0x00000004) != 0), getParentForChildren(), isClean()); applicationACLs_ = null; } return applicationACLsBuilder_; } private java.util.List containersFromPreviousAttempts_ = java.util.Collections.emptyList(); private void ensureContainersFromPreviousAttemptsIsMutable() { if (!((bitField0_ & 0x00000008) != 0)) { containersFromPreviousAttempts_ = new java.util.ArrayList(containersFromPreviousAttempts_); bitField0_ |= 0x00000008; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> containersFromPreviousAttemptsBuilder_; /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public java.util.List getContainersFromPreviousAttemptsList() { if (containersFromPreviousAttemptsBuilder_ == null) { return java.util.Collections.unmodifiableList(containersFromPreviousAttempts_); } else { return containersFromPreviousAttemptsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public int getContainersFromPreviousAttemptsCount() { if (containersFromPreviousAttemptsBuilder_ == null) { return containersFromPreviousAttempts_.size(); } else { return containersFromPreviousAttemptsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersFromPreviousAttempts(int index) { if 
(containersFromPreviousAttemptsBuilder_ == null) { return containersFromPreviousAttempts_.get(index); } else { return containersFromPreviousAttemptsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public Builder setContainersFromPreviousAttempts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) { if (containersFromPreviousAttemptsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainersFromPreviousAttemptsIsMutable(); containersFromPreviousAttempts_.set(index, value); onChanged(); } else { containersFromPreviousAttemptsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public Builder setContainersFromPreviousAttempts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) { if (containersFromPreviousAttemptsBuilder_ == null) { ensureContainersFromPreviousAttemptsIsMutable(); containersFromPreviousAttempts_.set(index, builderForValue.build()); onChanged(); } else { containersFromPreviousAttemptsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public Builder addContainersFromPreviousAttempts(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) { if (containersFromPreviousAttemptsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainersFromPreviousAttemptsIsMutable(); containersFromPreviousAttempts_.add(value); onChanged(); } else { containersFromPreviousAttemptsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public Builder addContainersFromPreviousAttempts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) { if (containersFromPreviousAttemptsBuilder_ == null) { if (value 
== null) { throw new NullPointerException(); } ensureContainersFromPreviousAttemptsIsMutable(); containersFromPreviousAttempts_.add(index, value); onChanged(); } else { containersFromPreviousAttemptsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public Builder addContainersFromPreviousAttempts( org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) { if (containersFromPreviousAttemptsBuilder_ == null) { ensureContainersFromPreviousAttemptsIsMutable(); containersFromPreviousAttempts_.add(builderForValue.build()); onChanged(); } else { containersFromPreviousAttemptsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public Builder addContainersFromPreviousAttempts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) { if (containersFromPreviousAttemptsBuilder_ == null) { ensureContainersFromPreviousAttemptsIsMutable(); containersFromPreviousAttempts_.add(index, builderForValue.build()); onChanged(); } else { containersFromPreviousAttemptsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public Builder addAllContainersFromPreviousAttempts( java.lang.Iterable values) { if (containersFromPreviousAttemptsBuilder_ == null) { ensureContainersFromPreviousAttemptsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, containersFromPreviousAttempts_); onChanged(); } else { containersFromPreviousAttemptsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public Builder clearContainersFromPreviousAttempts() { if (containersFromPreviousAttemptsBuilder_ == null) { containersFromPreviousAttempts_ = 
java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); } else { containersFromPreviousAttemptsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public Builder removeContainersFromPreviousAttempts(int index) { if (containersFromPreviousAttemptsBuilder_ == null) { ensureContainersFromPreviousAttemptsIsMutable(); containersFromPreviousAttempts_.remove(index); onChanged(); } else { containersFromPreviousAttemptsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder getContainersFromPreviousAttemptsBuilder( int index) { return getContainersFromPreviousAttemptsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersFromPreviousAttemptsOrBuilder( int index) { if (containersFromPreviousAttemptsBuilder_ == null) { return containersFromPreviousAttempts_.get(index); } else { return containersFromPreviousAttemptsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public java.util.List getContainersFromPreviousAttemptsOrBuilderList() { if (containersFromPreviousAttemptsBuilder_ != null) { return containersFromPreviousAttemptsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(containersFromPreviousAttempts_); } } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addContainersFromPreviousAttemptsBuilder() { return getContainersFromPreviousAttemptsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance()); } /** * repeated 
.hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addContainersFromPreviousAttemptsBuilder( int index) { return getContainersFromPreviousAttemptsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 4; */ public java.util.List getContainersFromPreviousAttemptsBuilderList() { return getContainersFromPreviousAttemptsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> getContainersFromPreviousAttemptsFieldBuilder() { if (containersFromPreviousAttemptsBuilder_ == null) { containersFromPreviousAttemptsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>( containersFromPreviousAttempts_, ((bitField0_ & 0x00000008) != 0), getParentForChildren(), isClean()); containersFromPreviousAttempts_ = null; } return containersFromPreviousAttemptsBuilder_; } private java.lang.Object queue_ = ""; /** * optional string queue = 5; */ public boolean hasQueue() { return ((bitField0_ & 0x00000010) != 0); } /** * optional string queue = 5; */ public java.lang.String getQueue() { java.lang.Object ref = queue_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { queue_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string queue = 5; */ 
public org.apache.hadoop.thirdparty.protobuf.ByteString getQueueBytes() {
  java.lang.Object ref = queue_;
  if (ref instanceof String) {
    // Field still holds the String form: convert to UTF-8 bytes and cache the
    // ByteString back into queue_ so subsequent calls skip the conversion.
    org.apache.hadoop.thirdparty.protobuf.ByteString b = 
        org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    queue_ = b;
    return b;
  } else {
    return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
  }
}
/**
 * optional string queue = 5;
 */
public Builder setQueue(
    java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
  queue_ = value;
  onChanged();
  return this;
}
/**
 * optional string queue = 5;
 */
public Builder clearQueue() {
  bitField0_ = (bitField0_ & ~0x00000010);
  // Restore the default instance's queue value.
  queue_ = getDefaultInstance().getQueue();
  onChanged();
  return this;
}
/**
 * optional string queue = 5;
 */
public Builder setQueueBytes(
    org.apache.hadoop.thirdparty.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
  queue_ = value;
  onChanged();
  return this;
}

// Backing list for the repeated field `nm_tokens_from_previous_attempts`
// (field 6).
// NOTE(review): generic type parameters appear to have been stripped from this
// copy of the generated source (raw java.util.List / java.util.ArrayList) --
// presumably List<NMTokenProto>; confirm against the original generated file.
private java.util.List nmTokensFromPreviousAttempts_ =
  java.util.Collections.emptyList();
// Copy-on-write guard: bit 0x00000020 of bitField0_ records whether the list
// is already a private mutable copy; a copy is made here before the first
// in-place mutation.
private void ensureNmTokensFromPreviousAttemptsIsMutable() {
  if (!((bitField0_ & 0x00000020) != 0)) {
    nmTokensFromPreviousAttempts_ = new java.util.ArrayList(nmTokensFromPreviousAttempts_);
    bitField0_ |= 0x00000020;
  }
}

// Lazily-created repeated-field builder; once non-null it owns the list and
// nmTokensFromPreviousAttempts_ is no longer authoritative.
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder> nmTokensFromPreviousAttemptsBuilder_;

/**
 * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6;
 */
public java.util.List getNmTokensFromPreviousAttemptsList() {
  if (nmTokensFromPreviousAttemptsBuilder_ == null) {
    // Expose an unmodifiable view so callers cannot bypass copy-on-write.
    return java.util.Collections.unmodifiableList(nmTokensFromPreviousAttempts_);
  } else {
    return nmTokensFromPreviousAttemptsBuilder_.getMessageList();
  }
}
/**
 * repeated
.hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ public int getNmTokensFromPreviousAttemptsCount() { if (nmTokensFromPreviousAttemptsBuilder_ == null) { return nmTokensFromPreviousAttempts_.size(); } else { return nmTokensFromPreviousAttemptsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getNmTokensFromPreviousAttempts(int index) { if (nmTokensFromPreviousAttemptsBuilder_ == null) { return nmTokensFromPreviousAttempts_.get(index); } else { return nmTokensFromPreviousAttemptsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ public Builder setNmTokensFromPreviousAttempts( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto value) { if (nmTokensFromPreviousAttemptsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNmTokensFromPreviousAttemptsIsMutable(); nmTokensFromPreviousAttempts_.set(index, value); onChanged(); } else { nmTokensFromPreviousAttemptsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ public Builder setNmTokensFromPreviousAttempts( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder builderForValue) { if (nmTokensFromPreviousAttemptsBuilder_ == null) { ensureNmTokensFromPreviousAttemptsIsMutable(); nmTokensFromPreviousAttempts_.set(index, builderForValue.build()); onChanged(); } else { nmTokensFromPreviousAttemptsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ public Builder addNmTokensFromPreviousAttempts(org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto value) { if (nmTokensFromPreviousAttemptsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } 
ensureNmTokensFromPreviousAttemptsIsMutable(); nmTokensFromPreviousAttempts_.add(value); onChanged(); } else { nmTokensFromPreviousAttemptsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ public Builder addNmTokensFromPreviousAttempts( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto value) { if (nmTokensFromPreviousAttemptsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNmTokensFromPreviousAttemptsIsMutable(); nmTokensFromPreviousAttempts_.add(index, value); onChanged(); } else { nmTokensFromPreviousAttemptsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ public Builder addNmTokensFromPreviousAttempts( org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder builderForValue) { if (nmTokensFromPreviousAttemptsBuilder_ == null) { ensureNmTokensFromPreviousAttemptsIsMutable(); nmTokensFromPreviousAttempts_.add(builderForValue.build()); onChanged(); } else { nmTokensFromPreviousAttemptsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ public Builder addNmTokensFromPreviousAttempts( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder builderForValue) { if (nmTokensFromPreviousAttemptsBuilder_ == null) { ensureNmTokensFromPreviousAttemptsIsMutable(); nmTokensFromPreviousAttempts_.add(index, builderForValue.build()); onChanged(); } else { nmTokensFromPreviousAttemptsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ public Builder addAllNmTokensFromPreviousAttempts( java.lang.Iterable values) { if (nmTokensFromPreviousAttemptsBuilder_ == null) { ensureNmTokensFromPreviousAttemptsIsMutable(); 
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, nmTokensFromPreviousAttempts_); onChanged(); } else { nmTokensFromPreviousAttemptsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ public Builder clearNmTokensFromPreviousAttempts() { if (nmTokensFromPreviousAttemptsBuilder_ == null) { nmTokensFromPreviousAttempts_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000020); onChanged(); } else { nmTokensFromPreviousAttemptsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ public Builder removeNmTokensFromPreviousAttempts(int index) { if (nmTokensFromPreviousAttemptsBuilder_ == null) { ensureNmTokensFromPreviousAttemptsIsMutable(); nmTokensFromPreviousAttempts_.remove(index); onChanged(); } else { nmTokensFromPreviousAttemptsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder getNmTokensFromPreviousAttemptsBuilder( int index) { return getNmTokensFromPreviousAttemptsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder getNmTokensFromPreviousAttemptsOrBuilder( int index) { if (nmTokensFromPreviousAttemptsBuilder_ == null) { return nmTokensFromPreviousAttempts_.get(index); } else { return nmTokensFromPreviousAttemptsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ public java.util.List getNmTokensFromPreviousAttemptsOrBuilderList() { if (nmTokensFromPreviousAttemptsBuilder_ != null) { return nmTokensFromPreviousAttemptsBuilder_.getMessageOrBuilderList(); } else { return 
java.util.Collections.unmodifiableList(nmTokensFromPreviousAttempts_); } } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder addNmTokensFromPreviousAttemptsBuilder() { return getNmTokensFromPreviousAttemptsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder addNmTokensFromPreviousAttemptsBuilder( int index) { return getNmTokensFromPreviousAttemptsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens_from_previous_attempts = 6; */ public java.util.List getNmTokensFromPreviousAttemptsBuilderList() { return getNmTokensFromPreviousAttemptsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder> getNmTokensFromPreviousAttemptsFieldBuilder() { if (nmTokensFromPreviousAttemptsBuilder_ == null) { nmTokensFromPreviousAttemptsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder>( nmTokensFromPreviousAttempts_, ((bitField0_ & 0x00000020) != 0), getParentForChildren(), isClean()); nmTokensFromPreviousAttempts_ = null; } return nmTokensFromPreviousAttemptsBuilder_; } private java.util.List schedulerResourceTypes_ = java.util.Collections.emptyList(); private void 
ensureSchedulerResourceTypesIsMutable() { if (!((bitField0_ & 0x00000040) != 0)) { schedulerResourceTypes_ = new java.util.ArrayList(schedulerResourceTypes_); bitField0_ |= 0x00000040; } } /** * repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7; */ public java.util.List getSchedulerResourceTypesList() { return new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter< java.lang.Integer, org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes>(schedulerResourceTypes_, schedulerResourceTypes_converter_); } /** * repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7; */ public int getSchedulerResourceTypesCount() { return schedulerResourceTypes_.size(); } /** * repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes getSchedulerResourceTypes(int index) { return schedulerResourceTypes_converter_.convert(schedulerResourceTypes_.get(index)); } /** * repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7; */ public Builder setSchedulerResourceTypes( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes value) { if (value == null) { throw new NullPointerException(); } ensureSchedulerResourceTypesIsMutable(); schedulerResourceTypes_.set(index, value.getNumber()); onChanged(); return this; } /** * repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7; */ public Builder addSchedulerResourceTypes(org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes value) { if (value == null) { throw new NullPointerException(); } ensureSchedulerResourceTypesIsMutable(); schedulerResourceTypes_.add(value.getNumber()); onChanged(); return this; } /** * repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7; */ public Builder addAllSchedulerResourceTypes( java.lang.Iterable values) { ensureSchedulerResourceTypesIsMutable(); for 
(org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes value : values) { schedulerResourceTypes_.add(value.getNumber()); } onChanged(); return this; } /** * repeated .hadoop.yarn.SchedulerResourceTypes scheduler_resource_types = 7; */ public Builder clearSchedulerResourceTypes() { schedulerResourceTypes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000040); onChanged(); return this; } private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto resourceProfiles_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder> resourceProfilesBuilder_; /** * optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8; */ public boolean hasResourceProfiles() { return ((bitField0_ & 0x00000080) != 0); } /** * optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getResourceProfiles() { if (resourceProfilesBuilder_ == null) { return resourceProfiles_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance() : resourceProfiles_; } else { return resourceProfilesBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8; */ public Builder setResourceProfiles(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto value) { if (resourceProfilesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } resourceProfiles_ = value; onChanged(); } else { resourceProfilesBuilder_.setMessage(value); } bitField0_ |= 0x00000080; return this; } /** * optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8; */ public Builder setResourceProfiles( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder builderForValue) { if (resourceProfilesBuilder_ == null) { resourceProfiles_ = builderForValue.build(); onChanged(); } else { resourceProfilesBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000080; return this; } /** * optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8; */ public Builder mergeResourceProfiles(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto value) { if (resourceProfilesBuilder_ == null) { if (((bitField0_ & 0x00000080) != 0) && resourceProfiles_ != null && resourceProfiles_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance()) { resourceProfiles_ = org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.newBuilder(resourceProfiles_).mergeFrom(value).buildPartial(); } else { resourceProfiles_ = value; } onChanged(); } else { resourceProfilesBuilder_.mergeFrom(value); } bitField0_ |= 0x00000080; return this; } /** * optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8; */ public Builder clearResourceProfiles() { if (resourceProfilesBuilder_ == null) { resourceProfiles_ = null; onChanged(); } else { resourceProfilesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000080); return this; } /** * optional 
.hadoop.yarn.ResourceProfilesProto resource_profiles = 8; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder getResourceProfilesBuilder() { bitField0_ |= 0x00000080; onChanged(); return getResourceProfilesFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder getResourceProfilesOrBuilder() { if (resourceProfilesBuilder_ != null) { return resourceProfilesBuilder_.getMessageOrBuilder(); } else { return resourceProfiles_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance() : resourceProfiles_; } } /** * optional .hadoop.yarn.ResourceProfilesProto resource_profiles = 8; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder> getResourceProfilesFieldBuilder() { if (resourceProfilesBuilder_ == null) { resourceProfilesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder>( getResourceProfiles(), getParentForChildren(), isClean()); resourceProfiles_ = null; } return resourceProfilesBuilder_; } private java.util.List resourceTypes_ = java.util.Collections.emptyList(); private void ensureResourceTypesIsMutable() { if (!((bitField0_ & 0x00000100) != 0)) { resourceTypes_ = new java.util.ArrayList(resourceTypes_); bitField0_ |= 0x00000100; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder, 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder> resourceTypesBuilder_; /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public java.util.List getResourceTypesList() { if (resourceTypesBuilder_ == null) { return java.util.Collections.unmodifiableList(resourceTypes_); } else { return resourceTypesBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public int getResourceTypesCount() { if (resourceTypesBuilder_ == null) { return resourceTypes_.size(); } else { return resourceTypesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getResourceTypes(int index) { if (resourceTypesBuilder_ == null) { return resourceTypes_.get(index); } else { return resourceTypesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public Builder setResourceTypes( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto value) { if (resourceTypesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResourceTypesIsMutable(); resourceTypes_.set(index, value); onChanged(); } else { resourceTypesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public Builder setResourceTypes( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder builderForValue) { if (resourceTypesBuilder_ == null) { ensureResourceTypesIsMutable(); resourceTypes_.set(index, builderForValue.build()); onChanged(); } else { resourceTypesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public Builder addResourceTypes(org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto value) { if (resourceTypesBuilder_ == null) { if (value 
== null) { throw new NullPointerException(); } ensureResourceTypesIsMutable(); resourceTypes_.add(value); onChanged(); } else { resourceTypesBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public Builder addResourceTypes( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto value) { if (resourceTypesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResourceTypesIsMutable(); resourceTypes_.add(index, value); onChanged(); } else { resourceTypesBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public Builder addResourceTypes( org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder builderForValue) { if (resourceTypesBuilder_ == null) { ensureResourceTypesIsMutable(); resourceTypes_.add(builderForValue.build()); onChanged(); } else { resourceTypesBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public Builder addResourceTypes( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder builderForValue) { if (resourceTypesBuilder_ == null) { ensureResourceTypesIsMutable(); resourceTypes_.add(index, builderForValue.build()); onChanged(); } else { resourceTypesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public Builder addAllResourceTypes( java.lang.Iterable values) { if (resourceTypesBuilder_ == null) { ensureResourceTypesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, resourceTypes_); onChanged(); } else { resourceTypesBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public Builder clearResourceTypes() { if (resourceTypesBuilder_ == null) { 
resourceTypes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000100); onChanged(); } else { resourceTypesBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public Builder removeResourceTypes(int index) { if (resourceTypesBuilder_ == null) { ensureResourceTypesIsMutable(); resourceTypes_.remove(index); onChanged(); } else { resourceTypesBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder getResourceTypesBuilder( int index) { return getResourceTypesFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder getResourceTypesOrBuilder( int index) { if (resourceTypesBuilder_ == null) { return resourceTypes_.get(index); } else { return resourceTypesBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public java.util.List getResourceTypesOrBuilderList() { if (resourceTypesBuilder_ != null) { return resourceTypesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(resourceTypes_); } } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder addResourceTypesBuilder() { return getResourceTypesFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder addResourceTypesBuilder( int index) { return getResourceTypesFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.getDefaultInstance()); } /** * repeated 
.hadoop.yarn.ResourceTypeInfoProto resource_types = 9; */ public java.util.List getResourceTypesBuilderList() { return getResourceTypesFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder> getResourceTypesFieldBuilder() { if (resourceTypesBuilder_ == null) { resourceTypesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder>( resourceTypes_, ((bitField0_ & 0x00000100) != 0), getParentForChildren(), isClean()); resourceTypes_ = null; } return resourceTypesBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.RegisterApplicationMasterResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.RegisterApplicationMasterResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final 
org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public RegisterApplicationMasterResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new RegisterApplicationMasterResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface FinishApplicationMasterRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.FinishApplicationMasterRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional string diagnostics = 1; */ boolean hasDiagnostics(); /** * optional string diagnostics = 1; */ java.lang.String getDiagnostics(); /** * optional string diagnostics = 1; */ org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsBytes(); /** * optional string tracking_url = 2; */ boolean hasTrackingUrl(); /** * optional string tracking_url = 2; */ java.lang.String getTrackingUrl(); /** * optional string tracking_url = 2; */ org.apache.hadoop.thirdparty.protobuf.ByteString getTrackingUrlBytes(); /** * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 3; */ boolean hasFinalApplicationStatus(); /** * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 3; */ org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus(); } /** * Protobuf type {@code 
hadoop.yarn.FinishApplicationMasterRequestProto} */ public static final class FinishApplicationMasterRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.FinishApplicationMasterRequestProto) FinishApplicationMasterRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use FinishApplicationMasterRequestProto.newBuilder() to construct. private FinishApplicationMasterRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private FinishApplicationMasterRequestProto() { diagnostics_ = ""; trackingUrl_ = ""; finalApplicationStatus_ = 0; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FinishApplicationMasterRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; diagnostics_ = bs; break; } case 18: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; trackingUrl_ = bs; break; } case 24: { int rawValue = input.readEnum(); @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto value = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.valueOf(rawValue); 
if (value == null) { unknownFields.mergeVarintField(3, rawValue); } else { bitField0_ |= 0x00000004; finalApplicationStatus_ = rawValue; } break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto.Builder.class); } private int bitField0_; public static final int DIAGNOSTICS_FIELD_NUMBER = 1; private volatile java.lang.Object diagnostics_; /** * optional string diagnostics = 1; */ public boolean hasDiagnostics() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string diagnostics = 1; */ public java.lang.String getDiagnostics() { java.lang.Object ref = diagnostics_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { diagnostics_ = s; } return s; } } 
/** * optional string diagnostics = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsBytes() { java.lang.Object ref = diagnostics_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); diagnostics_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int TRACKING_URL_FIELD_NUMBER = 2; private volatile java.lang.Object trackingUrl_; /** * optional string tracking_url = 2; */ public boolean hasTrackingUrl() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string tracking_url = 2; */ public java.lang.String getTrackingUrl() { java.lang.Object ref = trackingUrl_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { trackingUrl_ = s; } return s; } } /** * optional string tracking_url = 2; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getTrackingUrlBytes() { java.lang.Object ref = trackingUrl_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); trackingUrl_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int FINAL_APPLICATION_STATUS_FIELD_NUMBER = 3; private int finalApplicationStatus_; /** * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 3; */ public boolean hasFinalApplicationStatus() { return ((bitField0_ & 0x00000004) != 0); } /** * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus() { 
// ============================================================================
// GENERATED CODE — produced by the Protocol Buffers compiler (protoc) from
// yarn_service_protos.proto. DO NOT hand-edit message logic: tag numbers,
// bitField masks (0x1 = diagnostics, 0x2 = tracking_url, 0x4 =
// final_application_status) and the memoization protocol encode the wire
// format. Regenerate from the .proto file instead.
//
// NOTE(review): this listing appears to have passed through an HTML
// extraction that stripped angle-bracket generics (e.g. protoc normally
// emits `GeneratedMessageV3.Builder<Builder>` and
// `Parser<FinishApplicationMasterRequestProto>`; here they appear raw).
// Confirm against the regenerated source before compiling — TODO verify.
// ============================================================================
// --- FinishApplicationMasterRequestProto: tail of getFinalApplicationStatus()
//     (falls back to APP_UNDEFINED for unknown enum numbers), trivial
//     isInitialized (no required fields), writeTo/getSerializedSize for the
//     three optional fields, and the start of equals(). ---
@SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.valueOf(finalApplicationStatus_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.APP_UNDEFINED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, diagnostics_); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, trackingUrl_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeEnum(3, finalApplicationStatus_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, diagnostics_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, trackingUrl_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(3, finalApplicationStatus_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto)) { return super.equals(obj); } 
// --- equals() body (field-presence-aware comparison, includes unknown
//     fields), memoized hashCode() using the standard protoc 19/37/53/29
//     multipliers, and the ByteBuffer/ByteString parseFrom overloads. ---
org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto) obj; if (hasDiagnostics() != other.hasDiagnostics()) return false; if (hasDiagnostics()) { if (!getDiagnostics() .equals(other.getDiagnostics())) return false; } if (hasTrackingUrl() != other.hasTrackingUrl()) return false; if (hasTrackingUrl()) { if (!getTrackingUrl() .equals(other.getTrackingUrl())) return false; } if (hasFinalApplicationStatus() != other.hasFinalApplicationStatus()) return false; if (hasFinalApplicationStatus()) { if (finalApplicationStatus_ != other.finalApplicationStatus_) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasDiagnostics()) { hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER; hash = (53 * hash) + getDiagnostics().hashCode(); } if (hasTrackingUrl()) { hash = (37 * hash) + TRACKING_URL_FIELD_NUMBER; hash = (53 * hash) + getTrackingUrl().hashCode(); } if (hasFinalApplicationStatus()) { hash = (37 * hash) + FINAL_APPLICATION_STATUS_FIELD_NUMBER; hash = (53 * hash) + finalApplicationStatus_; } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } 
// --- Remaining parseFrom overloads (ByteString, byte[], InputStream),
//     each delegating to PARSER / GeneratedMessageV3 parse helpers. ---
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseDelimitedFrom(java.io.InputStream input) throws 
// --- Delimited / CodedInputStream parsing, plus the builder factory
//     methods (newBuilderForType / newBuilder / toBuilder). ---
java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
// --- Start of the nested Builder class for
//     FinishApplicationMasterRequestProto: descriptor wiring, constructors,
//     and clear() which resets all three fields and their presence bits.
//     NOTE(review): `GeneratedMessageV3.Builder` below is presumably
//     `GeneratedMessageV3.Builder<Builder>` in the real generated source —
//     generics likely stripped by extraction; confirm before compiling. ---
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.FinishApplicationMasterRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.FinishApplicationMasterRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); diagnostics_ = ""; bitField0_ = (bitField0_ & ~0x00000001); trackingUrl_ = ""; bitField0_ = (bitField0_ & 
// --- Builder.build()/buildPartial() (copies builder bits into the message
//     bitField0_) and the delegating setField/clearField/clearOneof
//     overrides. ---
~0x00000002); finalApplicationStatus_ = 0; bitField0_ = (bitField0_ & ~0x00000004); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.diagnostics_ = diagnostics_; if (((from_bitField0_ & 0x00000002) != 0)) { to_bitField0_ |= 0x00000002; } result.trackingUrl_ = trackingUrl_; if (((from_bitField0_ & 0x00000004) != 0)) { to_bitField0_ |= 0x00000004; } result.finalApplicationStatus_ = finalApplicationStatus_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public 
// --- Remaining delegating overrides and the two mergeFrom variants
//     (Message dispatch, and the typed merge that copies only set fields
//     plus unknown fields). ---
Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto.getDefaultInstance()) return this; if (other.hasDiagnostics()) { bitField0_ |= 0x00000001; diagnostics_ = other.diagnostics_; onChanged(); } if (other.hasTrackingUrl()) { bitField0_ |= 0x00000002; trackingUrl_ = other.trackingUrl_; onChanged(); } if (other.hasFinalApplicationStatus()) { setFinalApplicationStatus(other.getFinalApplicationStatus()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
// --- mergeFrom(CodedInputStream) body (keeps partial data on parse
//     failure), then the `diagnostics` field accessors: lazy UTF-8
//     String/ByteString interning in the getters. ---
org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object diagnostics_ = ""; /** * optional string diagnostics = 1; */ public boolean hasDiagnostics() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string diagnostics = 1; */ public java.lang.String getDiagnostics() { java.lang.Object ref = diagnostics_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { diagnostics_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string diagnostics = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsBytes() { java.lang.Object ref = diagnostics_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); diagnostics_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string diagnostics = 1; */ public Builder setDiagnostics( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; diagnostics_ = value; onChanged(); return this; } /** * optional string diagnostics = 1; */ public Builder clearDiagnostics() { bitField0_ = (bitField0_ & ~0x00000001); diagnostics_ = getDefaultInstance().getDiagnostics(); onChanged(); return this; } /** * optional string 
// --- `tracking_url` field accessors (same lazy String/ByteString pattern
//     as diagnostics, presence bit 0x2). ---
diagnostics = 1; */ public Builder setDiagnosticsBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; diagnostics_ = value; onChanged(); return this; } private java.lang.Object trackingUrl_ = ""; /** * optional string tracking_url = 2; */ public boolean hasTrackingUrl() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string tracking_url = 2; */ public java.lang.String getTrackingUrl() { java.lang.Object ref = trackingUrl_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { trackingUrl_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string tracking_url = 2; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getTrackingUrlBytes() { java.lang.Object ref = trackingUrl_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); trackingUrl_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string tracking_url = 2; */ public Builder setTrackingUrl( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; trackingUrl_ = value; onChanged(); return this; } /** * optional string tracking_url = 2; */ public Builder clearTrackingUrl() { bitField0_ = (bitField0_ & ~0x00000002); trackingUrl_ = getDefaultInstance().getTrackingUrl(); onChanged(); return this; } /** * optional string tracking_url = 2; */ public Builder setTrackingUrlBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; trackingUrl_ = value; onChanged(); return this; } private int finalApplicationStatus_ 
// --- `final_application_status` enum field accessors (stored as the raw
//     int number, presence bit 0x4), end of the Request Builder, and the
//     DEFAULT_INSTANCE static initializer. ---
= 0; /** * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 3; */ public boolean hasFinalApplicationStatus() { return ((bitField0_ & 0x00000004) != 0); } /** * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus() { @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.valueOf(finalApplicationStatus_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.APP_UNDEFINED : result; } /** * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 3; */ public Builder setFinalApplicationStatus(org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; finalApplicationStatus_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 3; */ public Builder clearFinalApplicationStatus() { bitField0_ = (bitField0_ & ~0x00000004); finalApplicationStatus_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.FinishApplicationMasterRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.FinishApplicationMasterRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto DEFAULT_INSTANCE; static { 
// --- PARSER singleton for the Request message, then the
//     FinishApplicationMasterResponseProtoOrBuilder interface and the start
//     of the FinishApplicationMasterResponseProto class (single optional
//     bool `isUnregistered`, field 1).
//     NOTE(review): `Parser` / `AbstractParser` appear raw here; protoc
//     emits them parameterized with the message type — likely stripped
//     generics, verify against regenerated source. ---
DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public FinishApplicationMasterRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new FinishApplicationMasterRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface FinishApplicationMasterResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.FinishApplicationMasterResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional bool isUnregistered = 1 [default = false]; */ boolean hasIsUnregistered(); /** * optional bool isUnregistered = 1 [default = false]; */ boolean getIsUnregistered(); } /** * Protobuf type {@code hadoop.yarn.FinishApplicationMasterResponseProto} */ public static final class FinishApplicationMasterResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.FinishApplicationMasterResponseProto) FinishApplicationMasterResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use 
// --- Response message constructors, including the wire-parsing constructor
//     (old-style pre-protobuf-3.11 codegen: tag loop, case 8 reads the bool,
//     unknown fields preserved). ---
FinishApplicationMasterResponseProto.newBuilder() to construct. private FinishApplicationMasterResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private FinishApplicationMasterResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FinishApplicationMasterResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { bitField0_ |= 0x00000001; isUnregistered_ = input.readBool(); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
// --- Response accessors (`isUnregistered`, presence bit 0x1),
//     serialization, and the start of equals(). ---
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.Builder.class); } private int bitField0_; public static final int ISUNREGISTERED_FIELD_NUMBER = 1; private boolean isUnregistered_; /** * optional bool isUnregistered = 1 [default = false]; */ public boolean hasIsUnregistered() { return ((bitField0_ & 0x00000001) != 0); } /** * optional bool isUnregistered = 1 [default = false]; */ public boolean getIsUnregistered() { return isUnregistered_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeBool(1, isUnregistered_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(1, isUnregistered_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto other = 
// --- Response equals()/hashCode() and the ByteBuffer/ByteString parseFrom
//     overloads. ---
(org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto) obj; if (hasIsUnregistered() != other.hasIsUnregistered()) return false; if (hasIsUnregistered()) { if (getIsUnregistered() != other.getIsUnregistered()) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasIsUnregistered()) { hash = (37 * hash) + ISUNREGISTERED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getIsUnregistered()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
// --- Response byte[]/InputStream/delimited parseFrom overloads. ---
extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom( 
// --- CodedInputStream parseFrom overloads and builder factory methods for
//     the Response message. ---
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
// --- Start of the Response Builder: descriptor wiring, constructors, and
//     clear() resetting the single bool field. ---
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.FinishApplicationMasterResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.FinishApplicationMasterResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); isUnregistered_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override 
// --- Response Builder build()/buildPartial() and delegating overrides. ---
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.isUnregistered_ = isUnregistered_; to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
// --- Remaining Response Builder overrides, mergeFrom variants, and the
//     start of the `isUnregistered_` field declaration (the field's
//     accessors continue past the end of this view). ---
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.getDefaultInstance()) return this; if (other.hasIsUnregistered()) { setIsUnregistered(other.getIsUnregistered()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private boolean isUnregistered_ ; /** * 
optional bool isUnregistered = 1 [default = false]; */ public boolean hasIsUnregistered() { return ((bitField0_ & 0x00000001) != 0); } /** * optional bool isUnregistered = 1 [default = false]; */ public boolean getIsUnregistered() { return isUnregistered_; } /** * optional bool isUnregistered = 1 [default = false]; */ public Builder setIsUnregistered(boolean value) { bitField0_ |= 0x00000001; isUnregistered_ = value; onChanged(); return this; } /** * optional bool isUnregistered = 1 [default = false]; */ public Builder clearIsUnregistered() { bitField0_ = (bitField0_ & ~0x00000001); isUnregistered_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.FinishApplicationMasterResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.FinishApplicationMasterResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public FinishApplicationMasterResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return new UpdateContainerRequestProto(input, extensionRegistry);
    }
  };

  public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

  // end of generated message class FinishApplicationMasterResponseProto
  }

  /**
   * Read-only accessor contract for {@code hadoop.yarn.UpdateContainerRequestProto},
   * implemented by both the immutable message class and its Builder (standard
   * protoc-generated pattern). For each proto field it exposes a {@code hasX()}
   * presence check plus a {@code getX()} accessor, and for message-typed fields
   * additionally a {@code getXOrBuilder()} view.
   *
   * NOTE(review): this file is protocol-buffer-compiler output ("DO NOT EDIT") —
   * regenerate from yarn_service_protos.proto rather than hand-editing.
   */
  public interface UpdateContainerRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateContainerRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * required int32 container_version = 1;
     */
    boolean hasContainerVersion();
    /**
     * required int32 container_version = 1;
     */
    int getContainerVersion();

    /**
     * required .hadoop.yarn.ContainerIdProto container_id = 2;
     */
    boolean hasContainerId();
    /**
     * required .hadoop.yarn.ContainerIdProto container_id = 2;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
    /**
     * required .hadoop.yarn.ContainerIdProto container_id = 2;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();

    /**
     * required .hadoop.yarn.ContainerUpdateTypeProto update_type = 3;
     */
    boolean hasUpdateType();
    /**
     * required .hadoop.yarn.ContainerUpdateTypeProto update_type = 3;
     */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto getUpdateType();

    /**
     * optional .hadoop.yarn.ResourceProto capability = 4;
     */
    boolean hasCapability();
    /**
     * optional .hadoop.yarn.ResourceProto capability = 4;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability();
    /**
     * optional .hadoop.yarn.ResourceProto capability = 4;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder();

    /**
     * optional .hadoop.yarn.ExecutionTypeProto execution_type = 5;
     */
    boolean hasExecutionType();
    /**
     * optional .hadoop.yarn.ExecutionTypeProto execution_type = 5;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType();
  }

  /**
   * Protobuf type {@code hadoop.yarn.UpdateContainerRequestProto}
   */
  public static final class UpdateContainerRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateContainerRequestProto)
      UpdateContainerRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use UpdateContainerRequestProto.newBuilder() to construct.
    private UpdateContainerRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) {
      super(builder);
    }
    // Default instance: enum-backed fields are initialized to their proto
    // defaults (update_type -> 0; execution_type -> 1 — presumably the
    // declared default of ExecutionTypeProto; confirm against yarn_protos).
    private UpdateContainerRequestProto() {
      updateType_ = 0;
      executionType_ = 1;
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() {
      return this.unknownFields;
    }

    // Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0),
    // tracking field presence in bitField0_; unrecognized fields and unknown
    // enum numbers are preserved in unknownFields rather than dropped.
    private UpdateContainerRequestProto(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: {  // field 1, varint: container_version
              bitField0_ |= 0x00000001;
              containerVersion_ = input.readInt32();
              break;
            }
            case 18: {  // field 2, length-delimited: container_id (merge if already present)
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) != 0)) {
                subBuilder = containerId_.toBuilder();
              }
              containerId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
subBuilder.mergeFrom(containerId_); containerId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 24: { int rawValue = input.readEnum(); @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto value = org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(3, rawValue); } else { bitField0_ |= 0x00000004; updateType_ = rawValue; } break; } case 34: { org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder subBuilder = null; if (((bitField0_ & 0x00000008) != 0)) { subBuilder = capability_.toBuilder(); } capability_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(capability_); capability_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000008; break; } case 40: { int rawValue = input.readEnum(); @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto value = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(5, rawValue); } else { bitField0_ |= 0x00000010; executionType_ = rawValue; } break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerRequestProto_descriptor; } @java.lang.Override 
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder.class); } private int bitField0_; public static final int CONTAINER_VERSION_FIELD_NUMBER = 1; private int containerVersion_; /** * required int32 container_version = 1; */ public boolean hasContainerVersion() { return ((bitField0_ & 0x00000001) != 0); } /** * required int32 container_version = 1; */ public int getContainerVersion() { return containerVersion_; } public static final int CONTAINER_ID_FIELD_NUMBER = 2; private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_; /** * required .hadoop.yarn.ContainerIdProto container_id = 2; */ public boolean hasContainerId() { return ((bitField0_ & 0x00000002) != 0); } /** * required .hadoop.yarn.ContainerIdProto container_id = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } /** * required .hadoop.yarn.ContainerIdProto container_id = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { return containerId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } public static final int UPDATE_TYPE_FIELD_NUMBER = 3; private int updateType_; /** * required .hadoop.yarn.ContainerUpdateTypeProto update_type = 3; */ public boolean hasUpdateType() { return ((bitField0_ & 0x00000004) != 0); } /** * required .hadoop.yarn.ContainerUpdateTypeProto update_type = 3; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto getUpdateType() { @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto result = org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.valueOf(updateType_); return result == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.INCREASE_RESOURCE : result; } public static final int CAPABILITY_FIELD_NUMBER = 4; private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_; /** * optional .hadoop.yarn.ResourceProto capability = 4; */ public boolean hasCapability() { return ((bitField0_ & 0x00000008) != 0); } /** * optional .hadoop.yarn.ResourceProto capability = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() { return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } /** * optional .hadoop.yarn.ResourceProto capability = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() { return capability_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } public static final int EXECUTION_TYPE_FIELD_NUMBER = 5; private int executionType_; /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 5; */ public boolean hasExecutionType() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 5; */ public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() { @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.valueOf(executionType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasContainerVersion()) { memoizedIsInitialized = 0; return false; } if (!hasContainerId()) { memoizedIsInitialized = 0; return false; } if (!hasUpdateType()) { memoizedIsInitialized = 0; return false; } if (hasCapability()) { if (!getCapability().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeInt32(1, containerVersion_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getContainerId()); } if (((bitField0_ & 0x00000004) != 0)) { output.writeEnum(3, updateType_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeMessage(4, getCapability()); } if (((bitField0_ & 0x00000010) != 0)) { output.writeEnum(5, executionType_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; 
if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(1, containerVersion_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getContainerId()); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(3, updateType_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(4, getCapability()); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(5, executionType_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto) obj; if (hasContainerVersion() != other.hasContainerVersion()) return false; if (hasContainerVersion()) { if (getContainerVersion() != other.getContainerVersion()) return false; } if (hasContainerId() != other.hasContainerId()) return false; if (hasContainerId()) { if (!getContainerId() .equals(other.getContainerId())) return false; } if (hasUpdateType() != other.hasUpdateType()) return false; if (hasUpdateType()) { if (updateType_ != other.updateType_) return false; } if (hasCapability() != other.hasCapability()) return false; if (hasCapability()) { if (!getCapability() .equals(other.getCapability())) return false; } if (hasExecutionType() != other.hasExecutionType()) return false; if (hasExecutionType()) { if (executionType_ != other.executionType_) 
return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasContainerVersion()) { hash = (37 * hash) + CONTAINER_VERSION_FIELD_NUMBER; hash = (53 * hash) + getContainerVersion(); } if (hasContainerId()) { hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER; hash = (53 * hash) + getContainerId().hashCode(); } if (hasUpdateType()) { hash = (37 * hash) + UPDATE_TYPE_FIELD_NUMBER; hash = (53 * hash) + updateType_; } if (hasCapability()) { hash = (37 * hash) + CAPABILITY_FIELD_NUMBER; hash = (53 * hash) + getCapability().hashCode(); } if (hasExecutionType()) { hash = (37 * hash) + EXECUTION_TYPE_FIELD_NUMBER; hash = (53 * hash) + executionType_; } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite 
extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.UpdateContainerRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateContainerRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getContainerIdFieldBuilder(); getCapabilityFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); containerVersion_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (containerIdBuilder_ == null) { containerId_ = 
null; } else { containerIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); updateType_ = 0; bitField0_ = (bitField0_ & ~0x00000004); if (capabilityBuilder_ == null) { capability_ = null; } else { capabilityBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); executionType_ = 1; bitField0_ = (bitField0_ & ~0x00000010); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.containerVersion_ = containerVersion_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { if (containerIdBuilder_ == null) { result.containerId_ = containerId_; } else { result.containerId_ = containerIdBuilder_.build(); } to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { to_bitField0_ |= 0x00000004; } result.updateType_ = updateType_; if (((from_bitField0_ & 0x00000008) != 0)) { if (capabilityBuilder_ == 
null) { result.capability_ = capability_; } else { result.capability_ = capabilityBuilder_.build(); } to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { to_bitField0_ |= 0x00000010; } result.executionType_ = executionType_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance()) return this; if (other.hasContainerVersion()) { setContainerVersion(other.getContainerVersion()); } if (other.hasContainerId()) { 
mergeContainerId(other.getContainerId()); } if (other.hasUpdateType()) { setUpdateType(other.getUpdateType()); } if (other.hasCapability()) { mergeCapability(other.getCapability()); } if (other.hasExecutionType()) { setExecutionType(other.getExecutionType()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasContainerVersion()) { return false; } if (!hasContainerId()) { return false; } if (!hasUpdateType()) { return false; } if (hasCapability()) { if (!getCapability().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int containerVersion_ ; /** * required int32 container_version = 1; */ public boolean hasContainerVersion() { return ((bitField0_ & 0x00000001) != 0); } /** * required int32 container_version = 1; */ public int getContainerVersion() { return containerVersion_; } /** * required int32 container_version = 1; */ public Builder setContainerVersion(int value) { bitField0_ |= 0x00000001; containerVersion_ = value; onChanged(); return this; } /** * required int32 container_version = 1; */ public Builder clearContainerVersion() { bitField0_ = (bitField0_ & ~0x00000001); containerVersion_ = 0; onChanged(); return this; } private 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_; /** * required .hadoop.yarn.ContainerIdProto container_id = 2; */ public boolean hasContainerId() { return ((bitField0_ & 0x00000002) != 0); } /** * required .hadoop.yarn.ContainerIdProto container_id = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() { if (containerIdBuilder_ == null) { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } else { return containerIdBuilder_.getMessage(); } } /** * required .hadoop.yarn.ContainerIdProto container_id = 2; */ public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } containerId_ = value; onChanged(); } else { containerIdBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * required .hadoop.yarn.ContainerIdProto container_id = 2; */ public Builder setContainerId( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (containerIdBuilder_ == null) { containerId_ = builderForValue.build(); onChanged(); } else { containerIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * required .hadoop.yarn.ContainerIdProto container_id = 2; */ public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && containerId_ != null && containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) { containerId_ = 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.newBuilder(containerId_).mergeFrom(value).buildPartial(); } else { containerId_ = value; } onChanged(); } else { containerIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * required .hadoop.yarn.ContainerIdProto container_id = 2; */ public Builder clearContainerId() { if (containerIdBuilder_ == null) { containerId_ = null; onChanged(); } else { containerIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * required .hadoop.yarn.ContainerIdProto container_id = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() { bitField0_ |= 0x00000002; onChanged(); return getContainerIdFieldBuilder().getBuilder(); } /** * required .hadoop.yarn.ContainerIdProto container_id = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { if (containerIdBuilder_ != null) { return containerIdBuilder_.getMessageOrBuilder(); } else { return containerId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } } /** * required .hadoop.yarn.ContainerIdProto container_id = 2; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getContainerIdFieldBuilder() { if (containerIdBuilder_ == null) { containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( getContainerId(), getParentForChildren(), isClean()); containerId_ = null; } return containerIdBuilder_; } private int updateType_ = 0; /** * required .hadoop.yarn.ContainerUpdateTypeProto update_type = 3; */ public boolean hasUpdateType() { return ((bitField0_ & 0x00000004) != 0); } /** * required .hadoop.yarn.ContainerUpdateTypeProto update_type = 3; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto getUpdateType() { @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto result = org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.valueOf(updateType_); return result == null ? 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.INCREASE_RESOURCE : result; } /** * required .hadoop.yarn.ContainerUpdateTypeProto update_type = 3; */ public Builder setUpdateType(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; updateType_ = value.getNumber(); onChanged(); return this; } /** * required .hadoop.yarn.ContainerUpdateTypeProto update_type = 3; */ public Builder clearUpdateType() { bitField0_ = (bitField0_ & ~0x00000004); updateType_ = 0; onChanged(); return this; } private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> capabilityBuilder_; /** * optional .hadoop.yarn.ResourceProto capability = 4; */ public boolean hasCapability() { return ((bitField0_ & 0x00000008) != 0); } /** * optional .hadoop.yarn.ResourceProto capability = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() { if (capabilityBuilder_ == null) { return capability_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } else { return capabilityBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceProto capability = 4; */ public Builder setCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (capabilityBuilder_ == null) { if (value == null) { throw new NullPointerException(); } capability_ = value; onChanged(); } else { capabilityBuilder_.setMessage(value); } bitField0_ |= 0x00000008; return this; } /** * optional .hadoop.yarn.ResourceProto capability = 4; */ public Builder setCapability( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) { if (capabilityBuilder_ == null) { capability_ = builderForValue.build(); onChanged(); } else { capabilityBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000008; return this; } /** * optional .hadoop.yarn.ResourceProto capability = 4; */ public Builder mergeCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (capabilityBuilder_ == null) { if (((bitField0_ & 0x00000008) != 0) && capability_ != null && capability_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) { capability_ = org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.newBuilder(capability_).mergeFrom(value).buildPartial(); } else { capability_ = value; } onChanged(); } else { capabilityBuilder_.mergeFrom(value); } bitField0_ |= 0x00000008; return this; } /** * optional .hadoop.yarn.ResourceProto capability = 4; */ public Builder clearCapability() { if (capabilityBuilder_ == null) { capability_ = null; onChanged(); } else { capabilityBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); return this; } /** * optional .hadoop.yarn.ResourceProto capability = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getCapabilityBuilder() { bitField0_ |= 0x00000008; onChanged(); return getCapabilityFieldBuilder().getBuilder(); } /** * optional 
.hadoop.yarn.ResourceProto capability = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() { if (capabilityBuilder_ != null) { return capabilityBuilder_.getMessageOrBuilder(); } else { return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } } /** * optional .hadoop.yarn.ResourceProto capability = 4; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getCapabilityFieldBuilder() { if (capabilityBuilder_ == null) { capabilityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>( getCapability(), getParentForChildren(), isClean()); capability_ = null; } return capabilityBuilder_; } private int executionType_ = 1; /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 5; */ public boolean hasExecutionType() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 5; */ public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() { @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.valueOf(executionType_); return result == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result; } /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 5; */ public Builder setExecutionType(org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000010; executionType_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 5; */ public Builder clearExecutionType() { bitField0_ = (bitField0_ & ~0x00000010); executionType_ = 1; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateContainerRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateContainerRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public UpdateContainerRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new UpdateContainerRequestProto(input, 
extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface UpdateContainerErrorProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateContainerErrorProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional string reason = 1; */ boolean hasReason(); /** * optional string reason = 1; */ java.lang.String getReason(); /** * optional string reason = 1; */ org.apache.hadoop.thirdparty.protobuf.ByteString getReasonBytes(); /** * optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2; */ boolean hasUpdateRequest(); /** * optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getUpdateRequest(); /** * optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder getUpdateRequestOrBuilder(); /** * optional int32 current_container_version = 3; */ boolean hasCurrentContainerVersion(); /** * optional int32 current_container_version = 3; */ int getCurrentContainerVersion(); } /** * Protobuf type {@code hadoop.yarn.UpdateContainerErrorProto} */ public static final class UpdateContainerErrorProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateContainerErrorProto) UpdateContainerErrorProtoOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateContainerErrorProto.newBuilder() to construct. 
private UpdateContainerErrorProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private UpdateContainerErrorProto() { reason_ = ""; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private UpdateContainerErrorProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; reason_ = bs; break; } case 18: { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder subBuilder = null; if (((bitField0_ & 0x00000002) != 0)) { subBuilder = updateRequest_.toBuilder(); } updateRequest_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(updateRequest_); updateRequest_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 24: { bitField0_ |= 0x00000004; currentContainerVersion_ = input.readInt32(); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( 
e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerErrorProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerErrorProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder.class); } private int bitField0_; public static final int REASON_FIELD_NUMBER = 1; private volatile java.lang.Object reason_; /** * optional string reason = 1; */ public boolean hasReason() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string reason = 1; */ public java.lang.String getReason() { java.lang.Object ref = reason_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { reason_ = s; } return s; } } /** * optional string reason = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getReasonBytes() { java.lang.Object ref = reason_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); reason_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int UPDATE_REQUEST_FIELD_NUMBER = 2; private org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto 
updateRequest_; /** * optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2; */ public boolean hasUpdateRequest() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getUpdateRequest() { return updateRequest_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance() : updateRequest_; } /** * optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder getUpdateRequestOrBuilder() { return updateRequest_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance() : updateRequest_; } public static final int CURRENT_CONTAINER_VERSION_FIELD_NUMBER = 3; private int currentContainerVersion_; /** * optional int32 current_container_version = 3; */ public boolean hasCurrentContainerVersion() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int32 current_container_version = 3; */ public int getCurrentContainerVersion() { return currentContainerVersion_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasUpdateRequest()) { if (!getUpdateRequest().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, reason_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateRequest()); } if (((bitField0_ & 0x00000004) != 0)) { 
output.writeInt32(3, currentContainerVersion_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, reason_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getUpdateRequest()); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(3, currentContainerVersion_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto) obj; if (hasReason() != other.hasReason()) return false; if (hasReason()) { if (!getReason() .equals(other.getReason())) return false; } if (hasUpdateRequest() != other.hasUpdateRequest()) return false; if (hasUpdateRequest()) { if (!getUpdateRequest() .equals(other.getUpdateRequest())) return false; } if (hasCurrentContainerVersion() != other.hasCurrentContainerVersion()) return false; if (hasCurrentContainerVersion()) { if (getCurrentContainerVersion() != other.getCurrentContainerVersion()) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasReason()) { hash = (37 * hash) + REASON_FIELD_NUMBER; hash = (53 * hash) + getReason().hashCode(); } if (hasUpdateRequest()) { 
hash = (37 * hash) + UPDATE_REQUEST_FIELD_NUMBER; hash = (53 * hash) + getUpdateRequest().hashCode(); } if (hasCurrentContainerVersion()) { hash = (37 * hash) + CURRENT_CONTAINER_VERSION_FIELD_NUMBER; hash = (53 * hash) + getCurrentContainerVersion(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.UpdateContainerErrorProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateContainerErrorProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerErrorProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerErrorProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private 
Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUpdateRequestFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); reason_ = ""; bitField0_ = (bitField0_ & ~0x00000001); if (updateRequestBuilder_ == null) { updateRequest_ = null; } else { updateRequestBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); currentContainerVersion_ = 0; bitField0_ = (bitField0_ & ~0x00000004); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateContainerErrorProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.reason_ = reason_; if (((from_bitField0_ & 0x00000002) != 0)) { if (updateRequestBuilder_ == null) { result.updateRequest_ = updateRequest_; } 
else { result.updateRequest_ = updateRequestBuilder_.build(); } to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.currentContainerVersion_ = currentContainerVersion_; to_bitField0_ |= 0x00000004; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.getDefaultInstance()) return this; if (other.hasReason()) { bitField0_ |= 0x00000001; reason_ = other.reason_; onChanged(); } if (other.hasUpdateRequest()) { 
mergeUpdateRequest(other.getUpdateRequest()); } if (other.hasCurrentContainerVersion()) { setCurrentContainerVersion(other.getCurrentContainerVersion()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasUpdateRequest()) { if (!getUpdateRequest().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object reason_ = ""; /** * optional string reason = 1; */ public boolean hasReason() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string reason = 1; */ public java.lang.String getReason() { java.lang.Object ref = reason_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { reason_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string reason = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getReasonBytes() { java.lang.Object ref = reason_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); reason_ = b; return b; } else { 
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string reason = 1; */ public Builder setReason( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; reason_ = value; onChanged(); return this; } /** * optional string reason = 1; */ public Builder clearReason() { bitField0_ = (bitField0_ & ~0x00000001); reason_ = getDefaultInstance().getReason(); onChanged(); return this; } /** * optional string reason = 1; */ public Builder setReasonBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; reason_ = value; onChanged(); return this; } private org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto updateRequest_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder> updateRequestBuilder_; /** * optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2; */ public boolean hasUpdateRequest() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getUpdateRequest() { if (updateRequestBuilder_ == null) { return updateRequest_ == null ? 
org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance() : updateRequest_; } else { return updateRequestBuilder_.getMessage(); } } /** * optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2; */ public Builder setUpdateRequest(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto value) { if (updateRequestBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateRequest_ = value; onChanged(); } else { updateRequestBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2; */ public Builder setUpdateRequest( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder builderForValue) { if (updateRequestBuilder_ == null) { updateRequest_ = builderForValue.build(); onChanged(); } else { updateRequestBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2; */ public Builder mergeUpdateRequest(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto value) { if (updateRequestBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateRequest_ != null && updateRequest_ != org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance()) { updateRequest_ = org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.newBuilder(updateRequest_).mergeFrom(value).buildPartial(); } else { updateRequest_ = value; } onChanged(); } else { updateRequestBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2; */ public Builder clearUpdateRequest() { if (updateRequestBuilder_ == null) { updateRequest_ = null; onChanged(); } else { updateRequestBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return 
this; } /** * optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder getUpdateRequestBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateRequestFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder getUpdateRequestOrBuilder() { if (updateRequestBuilder_ != null) { return updateRequestBuilder_.getMessageOrBuilder(); } else { return updateRequest_ == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance() : updateRequest_; } } /** * optional .hadoop.yarn.UpdateContainerRequestProto update_request = 2; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder> getUpdateRequestFieldBuilder() { if (updateRequestBuilder_ == null) { updateRequestBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder>( getUpdateRequest(), getParentForChildren(), isClean()); updateRequest_ = null; } return updateRequestBuilder_; } private int currentContainerVersion_ ; /** * optional int32 current_container_version = 3; */ public boolean hasCurrentContainerVersion() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int32 current_container_version = 3; */ public int getCurrentContainerVersion() { return currentContainerVersion_; } /** * optional int32 
current_container_version = 3; */ public Builder setCurrentContainerVersion(int value) { bitField0_ |= 0x00000004; currentContainerVersion_ = value; onChanged(); return this; } /** * optional int32 current_container_version = 3; */ public Builder clearCurrentContainerVersion() { bitField0_ = (bitField0_ & ~0x00000004); currentContainerVersion_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateContainerErrorProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateContainerErrorProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public UpdateContainerErrorProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new UpdateContainerErrorProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } 
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface AllocateRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.AllocateRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ java.util.List getAskList(); /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getAsk(int index); /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ int getAskCount(); /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ java.util.List getAskOrBuilderList(); /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getAskOrBuilder( int index); /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ java.util.List getReleaseList(); /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getRelease(int index); /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ int getReleaseCount(); /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ java.util.List getReleaseOrBuilderList(); /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getReleaseOrBuilder( int index); /** * optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3; */ boolean hasBlacklistRequest(); /** * optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto getBlacklistRequest(); /** * optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProtoOrBuilder getBlacklistRequestOrBuilder(); /** * 
optional int32 response_id = 4; */ boolean hasResponseId(); /** * optional int32 response_id = 4; */ int getResponseId(); /** * optional float progress = 5; */ boolean hasProgress(); /** * optional float progress = 5; */ float getProgress(); /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ java.util.List getUpdateRequestsList(); /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getUpdateRequests(int index); /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ int getUpdateRequestsCount(); /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ java.util.List getUpdateRequestsOrBuilderList(); /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder getUpdateRequestsOrBuilder( int index); /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ java.util.List getSchedulingRequestsList(); /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getSchedulingRequests(int index); /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ int getSchedulingRequestsCount(); /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ java.util.List getSchedulingRequestsOrBuilderList(); /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder getSchedulingRequestsOrBuilder( int index); /** * optional string tracking_url = 11; */ boolean hasTrackingUrl(); /** * optional string tracking_url = 11; */ java.lang.String getTrackingUrl(); /** * optional string tracking_url = 11; */ org.apache.hadoop.thirdparty.protobuf.ByteString getTrackingUrlBytes(); 
} /** * Protobuf type {@code hadoop.yarn.AllocateRequestProto} */ public static final class AllocateRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.AllocateRequestProto) AllocateRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use AllocateRequestProto.newBuilder() to construct. private AllocateRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private AllocateRequestProto() { ask_ = java.util.Collections.emptyList(); release_ = java.util.Collections.emptyList(); updateRequests_ = java.util.Collections.emptyList(); schedulingRequests_ = java.util.Collections.emptyList(); trackingUrl_ = ""; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private AllocateRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { ask_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } ask_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.PARSER, extensionRegistry)); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) != 0)) { release_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } release_.add( 
input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry)); break; } case 26: { org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = blacklistRequest_.toBuilder(); } blacklistRequest_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(blacklistRequest_); blacklistRequest_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 32: { bitField0_ |= 0x00000002; responseId_ = input.readInt32(); break; } case 45: { bitField0_ |= 0x00000004; progress_ = input.readFloat(); break; } case 58: { if (!((mutable_bitField0_ & 0x00000020) != 0)) { updateRequests_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000020; } updateRequests_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.PARSER, extensionRegistry)); break; } case 82: { if (!((mutable_bitField0_ & 0x00000040) != 0)) { schedulingRequests_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000040; } schedulingRequests_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.PARSER, extensionRegistry)); break; } case 90: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000008; trackingUrl_ = bs; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { ask_ = java.util.Collections.unmodifiableList(ask_); } if (((mutable_bitField0_ & 0x00000002) 
!= 0)) { release_ = java.util.Collections.unmodifiableList(release_); } if (((mutable_bitField0_ & 0x00000020) != 0)) { updateRequests_ = java.util.Collections.unmodifiableList(updateRequests_); } if (((mutable_bitField0_ & 0x00000040) != 0)) { schedulingRequests_ = java.util.Collections.unmodifiableList(schedulingRequests_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.Builder.class); } private int bitField0_; public static final int ASK_FIELD_NUMBER = 1; private java.util.List ask_; /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public java.util.List getAskList() { return ask_; } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public java.util.List getAskOrBuilderList() { return ask_; } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public int getAskCount() { return ask_.size(); } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getAsk(int index) { return ask_.get(index); } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getAskOrBuilder( int index) { return ask_.get(index); } public static final int RELEASE_FIELD_NUMBER = 2; private java.util.List release_; /** * repeated 
.hadoop.yarn.ContainerIdProto release = 2; */ public java.util.List getReleaseList() { return release_; } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public java.util.List getReleaseOrBuilderList() { return release_; } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public int getReleaseCount() { return release_.size(); } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getRelease(int index) { return release_.get(index); } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getReleaseOrBuilder( int index) { return release_.get(index); } public static final int BLACKLIST_REQUEST_FIELD_NUMBER = 3; private org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto blacklistRequest_; /** * optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3; */ public boolean hasBlacklistRequest() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto getBlacklistRequest() { return blacklistRequest_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.getDefaultInstance() : blacklistRequest_; } /** * optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProtoOrBuilder getBlacklistRequestOrBuilder() { return blacklistRequest_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.getDefaultInstance() : blacklistRequest_; } public static final int RESPONSE_ID_FIELD_NUMBER = 4; private int responseId_; /** * optional int32 response_id = 4; */ public boolean hasResponseId() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int32 response_id = 4; */ public int getResponseId() { return responseId_; } public static final int PROGRESS_FIELD_NUMBER = 5; private float progress_; /** * optional float progress = 5; */ public boolean hasProgress() { return ((bitField0_ & 0x00000004) != 0); } /** * optional float progress = 5; */ public float getProgress() { return progress_; } public static final int UPDATE_REQUESTS_FIELD_NUMBER = 7; private java.util.List updateRequests_; /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public java.util.List getUpdateRequestsList() { return updateRequests_; } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public java.util.List getUpdateRequestsOrBuilderList() { return updateRequests_; } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public int getUpdateRequestsCount() { return updateRequests_.size(); } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getUpdateRequests(int index) { return updateRequests_.get(index); } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder getUpdateRequestsOrBuilder( int index) { return updateRequests_.get(index); } public static final int SCHEDULING_REQUESTS_FIELD_NUMBER = 10; private java.util.List schedulingRequests_; /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public java.util.List getSchedulingRequestsList() { return schedulingRequests_; } /** * 
repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public java.util.List getSchedulingRequestsOrBuilderList() { return schedulingRequests_; } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public int getSchedulingRequestsCount() { return schedulingRequests_.size(); } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getSchedulingRequests(int index) { return schedulingRequests_.get(index); } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder getSchedulingRequestsOrBuilder( int index) { return schedulingRequests_.get(index); } public static final int TRACKING_URL_FIELD_NUMBER = 11; private volatile java.lang.Object trackingUrl_; /** * optional string tracking_url = 11; */ public boolean hasTrackingUrl() { return ((bitField0_ & 0x00000008) != 0); } /** * optional string tracking_url = 11; */ public java.lang.String getTrackingUrl() { java.lang.Object ref = trackingUrl_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { trackingUrl_ = s; } return s; } } /** * optional string tracking_url = 11; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getTrackingUrlBytes() { java.lang.Object ref = trackingUrl_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); trackingUrl_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = 
memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getAskCount(); i++) { if (!getAsk(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getUpdateRequestsCount(); i++) { if (!getUpdateRequests(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getSchedulingRequestsCount(); i++) { if (!getSchedulingRequests(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < ask_.size(); i++) { output.writeMessage(1, ask_.get(i)); } for (int i = 0; i < release_.size(); i++) { output.writeMessage(2, release_.get(i)); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(3, getBlacklistRequest()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt32(4, responseId_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeFloat(5, progress_); } for (int i = 0; i < updateRequests_.size(); i++) { output.writeMessage(7, updateRequests_.get(i)); } for (int i = 0; i < schedulingRequests_.size(); i++) { output.writeMessage(10, schedulingRequests_.get(i)); } if (((bitField0_ & 0x00000008) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 11, trackingUrl_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < ask_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, ask_.get(i)); } for (int i = 0; i < release_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, release_.get(i)); } if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream 
.computeMessageSize(3, getBlacklistRequest()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(4, responseId_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeFloatSize(5, progress_); } for (int i = 0; i < updateRequests_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(7, updateRequests_.get(i)); } for (int i = 0; i < schedulingRequests_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(10, schedulingRequests_.get(i)); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(11, trackingUrl_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto) obj; if (!getAskList() .equals(other.getAskList())) return false; if (!getReleaseList() .equals(other.getReleaseList())) return false; if (hasBlacklistRequest() != other.hasBlacklistRequest()) return false; if (hasBlacklistRequest()) { if (!getBlacklistRequest() .equals(other.getBlacklistRequest())) return false; } if (hasResponseId() != other.hasResponseId()) return false; if (hasResponseId()) { if (getResponseId() != other.getResponseId()) return false; } if (hasProgress() != other.hasProgress()) return false; if (hasProgress()) { if (java.lang.Float.floatToIntBits(getProgress()) != java.lang.Float.floatToIntBits( other.getProgress())) return false; } if (!getUpdateRequestsList() .equals(other.getUpdateRequestsList())) 
return false; if (!getSchedulingRequestsList() .equals(other.getSchedulingRequestsList())) return false; if (hasTrackingUrl() != other.hasTrackingUrl()) return false; if (hasTrackingUrl()) { if (!getTrackingUrl() .equals(other.getTrackingUrl())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getAskCount() > 0) { hash = (37 * hash) + ASK_FIELD_NUMBER; hash = (53 * hash) + getAskList().hashCode(); } if (getReleaseCount() > 0) { hash = (37 * hash) + RELEASE_FIELD_NUMBER; hash = (53 * hash) + getReleaseList().hashCode(); } if (hasBlacklistRequest()) { hash = (37 * hash) + BLACKLIST_REQUEST_FIELD_NUMBER; hash = (53 * hash) + getBlacklistRequest().hashCode(); } if (hasResponseId()) { hash = (37 * hash) + RESPONSE_ID_FIELD_NUMBER; hash = (53 * hash) + getResponseId(); } if (hasProgress()) { hash = (37 * hash) + PROGRESS_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits( getProgress()); } if (getUpdateRequestsCount() > 0) { hash = (37 * hash) + UPDATE_REQUESTS_FIELD_NUMBER; hash = (53 * hash) + getUpdateRequestsList().hashCode(); } if (getSchedulingRequestsCount() > 0) { hash = (37 * hash) + SCHEDULING_REQUESTS_FIELD_NUMBER; hash = (53 * hash) + getSchedulingRequestsList().hashCode(); } if (hasTrackingUrl()) { hash = (37 * hash) + TRACKING_URL_FIELD_NUMBER; hash = (53 * hash) + getTrackingUrl().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom( java.nio.ByteBuffer data, 
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
// Generated Builder for hadoop.yarn.AllocateRequestProto: descriptor wiring,
// constructors (maybeForceBuilderInitialization pre-creates nested field
// builders only when alwaysUseFieldBuilders is set), and clear(), which resets
// every field and its corresponding bitField0_ presence bit.
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.AllocateRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.AllocateRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getAskFieldBuilder(); getReleaseFieldBuilder(); getBlacklistRequestFieldBuilder(); getUpdateRequestsFieldBuilder(); getSchedulingRequestsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (askBuilder_ == null) { ask_ = java.util.Collections.emptyList(); bitField0_ 
// clear() continues: reset release/blacklistRequest/responseId/progress/
// updateRequests/schedulingRequests/trackingUrl; then build()/buildPartial(),
// which copy builder state into an immutable message and translate builder
// presence bits (from_bitField0_) into message presence bits (to_bitField0_).
= (bitField0_ & ~0x00000001); } else { askBuilder_.clear(); } if (releaseBuilder_ == null) { release_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { releaseBuilder_.clear(); } if (blacklistRequestBuilder_ == null) { blacklistRequest_ = null; } else { blacklistRequestBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); responseId_ = 0; bitField0_ = (bitField0_ & ~0x00000008); progress_ = 0F; bitField0_ = (bitField0_ & ~0x00000010); if (updateRequestsBuilder_ == null) { updateRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000020); } else { updateRequestsBuilder_.clear(); } if (schedulingRequestsBuilder_ == null) { schedulingRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000040); } else { schedulingRequestsBuilder_.clear(); } trackingUrl_ = ""; bitField0_ = (bitField0_ & ~0x00000080); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if 
// buildPartial() body: repeated fields are wrapped unmodifiable when built
// straight from the local list (clearing the mutability bit), otherwise
// delegated to their RepeatedFieldBuilderV3; then the generic setField
// delegating override.
(askBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { ask_ = java.util.Collections.unmodifiableList(ask_); bitField0_ = (bitField0_ & ~0x00000001); } result.ask_ = ask_; } else { result.ask_ = askBuilder_.build(); } if (releaseBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { release_ = java.util.Collections.unmodifiableList(release_); bitField0_ = (bitField0_ & ~0x00000002); } result.release_ = release_; } else { result.release_ = releaseBuilder_.build(); } if (((from_bitField0_ & 0x00000004) != 0)) { if (blacklistRequestBuilder_ == null) { result.blacklistRequest_ = blacklistRequest_; } else { result.blacklistRequest_ = blacklistRequestBuilder_.build(); } to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000008) != 0)) { result.responseId_ = responseId_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000010) != 0)) { result.progress_ = progress_; to_bitField0_ |= 0x00000004; } if (updateRequestsBuilder_ == null) { if (((bitField0_ & 0x00000020) != 0)) { updateRequests_ = java.util.Collections.unmodifiableList(updateRequests_); bitField0_ = (bitField0_ & ~0x00000020); } result.updateRequests_ = updateRequests_; } else { result.updateRequests_ = updateRequestsBuilder_.build(); } if (schedulingRequestsBuilder_ == null) { if (((bitField0_ & 0x00000040) != 0)) { schedulingRequests_ = java.util.Collections.unmodifiableList(schedulingRequests_); bitField0_ = (bitField0_ & ~0x00000040); } result.schedulingRequests_ = schedulingRequests_; } else { result.schedulingRequests_ = schedulingRequestsBuilder_.build(); } if (((from_bitField0_ & 0x00000080) != 0)) { to_bitField0_ |= 0x00000008; } result.trackingUrl_ = trackingUrl_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } 
// Delegating reflection-style overrides (clearField/clearOneof/
// setRepeatedField/addRepeatedField) plus the two mergeFrom overloads; merging
// a repeated field either appends to the local list or, when a field builder
// exists, disposes/recreates it per the standard protoc-generated pattern.
@java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.getDefaultInstance()) return this; if (askBuilder_ == null) { if (!other.ask_.isEmpty()) { if (ask_.isEmpty()) { ask_ = other.ask_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureAskIsMutable(); ask_.addAll(other.ask_); } onChanged(); } } else { if (!other.ask_.isEmpty()) { if (askBuilder_.isEmpty()) { askBuilder_.dispose(); askBuilder_ = null; ask_ = other.ask_; bitField0_ = (bitField0_ & ~0x00000001); askBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
// mergeFrom continues for the release and updateRequests repeated fields;
// singular fields (blacklistRequest/responseId/progress) are copied only when
// the source message has them set (hasX() checks).
getAskFieldBuilder() : null; } else { askBuilder_.addAllMessages(other.ask_); } } } if (releaseBuilder_ == null) { if (!other.release_.isEmpty()) { if (release_.isEmpty()) { release_ = other.release_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureReleaseIsMutable(); release_.addAll(other.release_); } onChanged(); } } else { if (!other.release_.isEmpty()) { if (releaseBuilder_.isEmpty()) { releaseBuilder_.dispose(); releaseBuilder_ = null; release_ = other.release_; bitField0_ = (bitField0_ & ~0x00000002); releaseBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getReleaseFieldBuilder() : null; } else { releaseBuilder_.addAllMessages(other.release_); } } } if (other.hasBlacklistRequest()) { mergeBlacklistRequest(other.getBlacklistRequest()); } if (other.hasResponseId()) { setResponseId(other.getResponseId()); } if (other.hasProgress()) { setProgress(other.getProgress()); } if (updateRequestsBuilder_ == null) { if (!other.updateRequests_.isEmpty()) { if (updateRequests_.isEmpty()) { updateRequests_ = other.updateRequests_; bitField0_ = (bitField0_ & ~0x00000020); } else { ensureUpdateRequestsIsMutable(); updateRequests_.addAll(other.updateRequests_); } onChanged(); } } else { if (!other.updateRequests_.isEmpty()) { if (updateRequestsBuilder_.isEmpty()) { updateRequestsBuilder_.dispose(); updateRequestsBuilder_ = null; updateRequests_ = other.updateRequests_; bitField0_ = (bitField0_ & ~0x00000020); updateRequestsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
// schedulingRequests merge, trackingUrl merge; isInitialized() requires every
// element of ask/updateRequests/schedulingRequests to be initialized;
// mergeFrom(CodedInputStream) parses then merges, keeping any partially parsed
// message on InvalidProtocolBufferException.
getUpdateRequestsFieldBuilder() : null; } else { updateRequestsBuilder_.addAllMessages(other.updateRequests_); } } } if (schedulingRequestsBuilder_ == null) { if (!other.schedulingRequests_.isEmpty()) { if (schedulingRequests_.isEmpty()) { schedulingRequests_ = other.schedulingRequests_; bitField0_ = (bitField0_ & ~0x00000040); } else { ensureSchedulingRequestsIsMutable(); schedulingRequests_.addAll(other.schedulingRequests_); } onChanged(); } } else { if (!other.schedulingRequests_.isEmpty()) { if (schedulingRequestsBuilder_.isEmpty()) { schedulingRequestsBuilder_.dispose(); schedulingRequestsBuilder_ = null; schedulingRequests_ = other.schedulingRequests_; bitField0_ = (bitField0_ & ~0x00000040); schedulingRequestsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getSchedulingRequestsFieldBuilder() : null; } else { schedulingRequestsBuilder_.addAllMessages(other.schedulingRequests_); } } } if (other.hasTrackingUrl()) { bitField0_ |= 0x00000080; trackingUrl_ = other.trackingUrl_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getAskCount(); i++) { if (!getAsk(i).isInitialized()) { return false; } } for (int i = 0; i < getUpdateRequestsCount(); i++) { if (!getUpdateRequests(i).isInitialized()) { return false; } } for (int i = 0; i < getSchedulingRequestsCount(); i++) { if (!getSchedulingRequests(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { 
// Tail of the parse-and-merge: on failure, merge whatever was successfully
// parsed before rethrowing. Then the builder's field storage: bitField0_
// tracks which fields are set/mutable; ask_ (repeated ResourceRequestProto,
// field 1) uses copy-on-write via ensureAskIsMutable() plus an optional
// RepeatedFieldBuilderV3 (askBuilder_) that takes over once builder views are
// requested.
parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List ask_ = java.util.Collections.emptyList(); private void ensureAskIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { ask_ = new java.util.ArrayList(ask_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> askBuilder_; /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public java.util.List getAskList() { if (askBuilder_ == null) { return java.util.Collections.unmodifiableList(ask_); } else { return askBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public int getAskCount() { if (askBuilder_ == null) { return ask_.size(); } else { return askBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getAsk(int index) { if (askBuilder_ == null) { return ask_.get(index); } else { return askBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public Builder setAsk( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto value) { if (askBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAskIsMutable(); ask_.set(index, value); onChanged(); } else { askBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public Builder setAsk( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder builderForValue) { if (askBuilder_ == null) { 
// setAsk/addAsk/addAllAsk variants: mutate the local list (null-checking
// message values) when no field builder exists, otherwise delegate to
// askBuilder_.
ensureAskIsMutable(); ask_.set(index, builderForValue.build()); onChanged(); } else { askBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public Builder addAsk(org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto value) { if (askBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAskIsMutable(); ask_.add(value); onChanged(); } else { askBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public Builder addAsk( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto value) { if (askBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAskIsMutable(); ask_.add(index, value); onChanged(); } else { askBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public Builder addAsk( org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder builderForValue) { if (askBuilder_ == null) { ensureAskIsMutable(); ask_.add(builderForValue.build()); onChanged(); } else { askBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public Builder addAsk( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder builderForValue) { if (askBuilder_ == null) { ensureAskIsMutable(); ask_.add(index, builderForValue.build()); onChanged(); } else { askBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public Builder addAllAsk( java.lang.Iterable values) { if (askBuilder_ == null) { ensureAskIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, ask_); onChanged(); } else { askBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public Builder 
// clearAsk/removeAsk plus the builder-view accessors (getAskBuilder,
// getAskOrBuilder, getAskOrBuilderList, addAskBuilder, getAskBuilderList),
// which force creation of the RepeatedFieldBuilderV3.
clearAsk() { if (askBuilder_ == null) { ask_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { askBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public Builder removeAsk(int index) { if (askBuilder_ == null) { ensureAskIsMutable(); ask_.remove(index); onChanged(); } else { askBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder getAskBuilder( int index) { return getAskFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getAskOrBuilder( int index) { if (askBuilder_ == null) { return ask_.get(index); } else { return askBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public java.util.List getAskOrBuilderList() { if (askBuilder_ != null) { return askBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(ask_); } } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder addAskBuilder() { return getAskFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder addAskBuilder( int index) { return getAskFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ResourceRequestProto ask = 1; */ public java.util.List getAskBuilderList() { return getAskFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< 
// getAskFieldBuilder() lazily creates the RepeatedFieldBuilderV3 for 'ask' and
// hands the local list over to it (ask_ = null afterwards). Then release_
// (repeated ContainerIdProto, field 2) with the identical copy-on-write
// pattern via ensureReleaseIsMutable() and releaseBuilder_.
org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> getAskFieldBuilder() { if (askBuilder_ == null) { askBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder>( ask_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); ask_ = null; } return askBuilder_; } private java.util.List release_ = java.util.Collections.emptyList(); private void ensureReleaseIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { release_ = new java.util.ArrayList(release_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> releaseBuilder_; /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public java.util.List getReleaseList() { if (releaseBuilder_ == null) { return java.util.Collections.unmodifiableList(release_); } else { return releaseBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public int getReleaseCount() { if (releaseBuilder_ == null) { return release_.size(); } else { return releaseBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getRelease(int index) { if (releaseBuilder_ == null) { return release_.get(index); } else { return releaseBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public Builder setRelease( int index, 
// setRelease/addRelease/addAllRelease: list mutation vs. releaseBuilder_
// delegation, mirroring the 'ask' accessors above.
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (releaseBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReleaseIsMutable(); release_.set(index, value); onChanged(); } else { releaseBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public Builder setRelease( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (releaseBuilder_ == null) { ensureReleaseIsMutable(); release_.set(index, builderForValue.build()); onChanged(); } else { releaseBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public Builder addRelease(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (releaseBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReleaseIsMutable(); release_.add(value); onChanged(); } else { releaseBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public Builder addRelease( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (releaseBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReleaseIsMutable(); release_.add(index, value); onChanged(); } else { releaseBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public Builder addRelease( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (releaseBuilder_ == null) { ensureReleaseIsMutable(); release_.add(builderForValue.build()); onChanged(); } else { releaseBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public Builder addRelease( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (releaseBuilder_ == null) { 
// clearRelease/removeRelease and the release builder-view accessors.
ensureReleaseIsMutable(); release_.add(index, builderForValue.build()); onChanged(); } else { releaseBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public Builder addAllRelease( java.lang.Iterable values) { if (releaseBuilder_ == null) { ensureReleaseIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, release_); onChanged(); } else { releaseBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public Builder clearRelease() { if (releaseBuilder_ == null) { release_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { releaseBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public Builder removeRelease(int index) { if (releaseBuilder_ == null) { ensureReleaseIsMutable(); release_.remove(index); onChanged(); } else { releaseBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getReleaseBuilder( int index) { return getReleaseFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getReleaseOrBuilder( int index) { if (releaseBuilder_ == null) { return release_.get(index); } else { return releaseBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public java.util.List getReleaseOrBuilderList() { if (releaseBuilder_ != null) { return releaseBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(release_); } } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addReleaseBuilder() { return 
// addReleaseBuilder()/getReleaseBuilderList() and the lazy
// getReleaseFieldBuilder(); then the singular message field blacklist_request
// (ResourceBlacklistRequestProto, field 3) backed by a SingleFieldBuilderV3,
// with presence tracked by bit 0x00000004.
getReleaseFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addReleaseBuilder( int index) { return getReleaseFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerIdProto release = 2; */ public java.util.List getReleaseBuilderList() { return getReleaseFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getReleaseFieldBuilder() { if (releaseBuilder_ == null) { releaseBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( release_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); release_ = null; } return releaseBuilder_; } private org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto blacklistRequest_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProtoOrBuilder> blacklistRequestBuilder_; /** * optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3; */ public boolean hasBlacklistRequest() { return ((bitField0_ & 0x00000004) != 0); } /** * optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3; */ public 
// getBlacklistRequest falls back to the default instance when unset;
// setBlacklistRequest null-checks; mergeBlacklistRequest combines an existing
// non-default value via newBuilder(...).mergeFrom(...).buildPartial(),
// otherwise just replaces it.
org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto getBlacklistRequest() { if (blacklistRequestBuilder_ == null) { return blacklistRequest_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.getDefaultInstance() : blacklistRequest_; } else { return blacklistRequestBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3; */ public Builder setBlacklistRequest(org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto value) { if (blacklistRequestBuilder_ == null) { if (value == null) { throw new NullPointerException(); } blacklistRequest_ = value; onChanged(); } else { blacklistRequestBuilder_.setMessage(value); } bitField0_ |= 0x00000004; return this; } /** * optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3; */ public Builder setBlacklistRequest( org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.Builder builderForValue) { if (blacklistRequestBuilder_ == null) { blacklistRequest_ = builderForValue.build(); onChanged(); } else { blacklistRequestBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; return this; } /** * optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3; */ public Builder mergeBlacklistRequest(org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto value) { if (blacklistRequestBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && blacklistRequest_ != null && blacklistRequest_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.getDefaultInstance()) { blacklistRequest_ = org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.newBuilder(blacklistRequest_).mergeFrom(value).buildPartial(); } else { blacklistRequest_ = value; } onChanged(); } else { blacklistRequestBuilder_.mergeFrom(value); } bitField0_ |= 0x00000004; return this; } /** * optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request 
= 3; */ public Builder clearBlacklistRequest() { if (blacklistRequestBuilder_ == null) { blacklistRequest_ = null; onChanged(); } else { blacklistRequestBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } /** * optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.Builder getBlacklistRequestBuilder() { bitField0_ |= 0x00000004; onChanged(); return getBlacklistRequestFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProtoOrBuilder getBlacklistRequestOrBuilder() { if (blacklistRequestBuilder_ != null) { return blacklistRequestBuilder_.getMessageOrBuilder(); } else { return blacklistRequest_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.getDefaultInstance() : blacklistRequest_; } } /** * optional .hadoop.yarn.ResourceBlacklistRequestProto blacklist_request = 3; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProtoOrBuilder> getBlacklistRequestFieldBuilder() { if (blacklistRequestBuilder_ == null) { blacklistRequestBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProtoOrBuilder>( getBlacklistRequest(), getParentForChildren(), isClean()); blacklistRequest_ = null; } return blacklistRequestBuilder_; } private int responseId_ ; /** * optional int32 response_id = 4; */ public boolean hasResponseId() { return 
// Scalar accessors: response_id (int32, field 4, presence bit 0x00000008) and
// progress (float, field 5, bit 0x00000010); then update_requests (repeated
// UpdateContainerRequestProto, field 7, mutability bit 0x00000020) with the
// same copy-on-write / RepeatedFieldBuilderV3 pattern as 'ask' and 'release'.
((bitField0_ & 0x00000008) != 0); } /** * optional int32 response_id = 4; */ public int getResponseId() { return responseId_; } /** * optional int32 response_id = 4; */ public Builder setResponseId(int value) { bitField0_ |= 0x00000008; responseId_ = value; onChanged(); return this; } /** * optional int32 response_id = 4; */ public Builder clearResponseId() { bitField0_ = (bitField0_ & ~0x00000008); responseId_ = 0; onChanged(); return this; } private float progress_ ; /** * optional float progress = 5; */ public boolean hasProgress() { return ((bitField0_ & 0x00000010) != 0); } /** * optional float progress = 5; */ public float getProgress() { return progress_; } /** * optional float progress = 5; */ public Builder setProgress(float value) { bitField0_ |= 0x00000010; progress_ = value; onChanged(); return this; } /** * optional float progress = 5; */ public Builder clearProgress() { bitField0_ = (bitField0_ & ~0x00000010); progress_ = 0F; onChanged(); return this; } private java.util.List updateRequests_ = java.util.Collections.emptyList(); private void ensureUpdateRequestsIsMutable() { if (!((bitField0_ & 0x00000020) != 0)) { updateRequests_ = new java.util.ArrayList(updateRequests_); bitField0_ |= 0x00000020; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder> updateRequestsBuilder_; /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public java.util.List getUpdateRequestsList() { if (updateRequestsBuilder_ == null) { return java.util.Collections.unmodifiableList(updateRequests_); } else { return updateRequestsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public int getUpdateRequestsCount() { if 
// getUpdateRequests/setUpdateRequests/addUpdateRequests accessors, mirroring
// the 'ask'/'release' repeated-field pattern.
(updateRequestsBuilder_ == null) { return updateRequests_.size(); } else { return updateRequestsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto getUpdateRequests(int index) { if (updateRequestsBuilder_ == null) { return updateRequests_.get(index); } else { return updateRequestsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public Builder setUpdateRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto value) { if (updateRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUpdateRequestsIsMutable(); updateRequests_.set(index, value); onChanged(); } else { updateRequestsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public Builder setUpdateRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder builderForValue) { if (updateRequestsBuilder_ == null) { ensureUpdateRequestsIsMutable(); updateRequests_.set(index, builderForValue.build()); onChanged(); } else { updateRequestsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public Builder addUpdateRequests(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto value) { if (updateRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUpdateRequestsIsMutable(); updateRequests_.add(value); onChanged(); } else { updateRequestsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public Builder addUpdateRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto 
// addUpdateRequests (indexed/builder variants), addAllUpdateRequests,
// clearUpdateRequests, removeUpdateRequests.
value) { if (updateRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUpdateRequestsIsMutable(); updateRequests_.add(index, value); onChanged(); } else { updateRequestsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public Builder addUpdateRequests( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder builderForValue) { if (updateRequestsBuilder_ == null) { ensureUpdateRequestsIsMutable(); updateRequests_.add(builderForValue.build()); onChanged(); } else { updateRequestsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public Builder addUpdateRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder builderForValue) { if (updateRequestsBuilder_ == null) { ensureUpdateRequestsIsMutable(); updateRequests_.add(index, builderForValue.build()); onChanged(); } else { updateRequestsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public Builder addAllUpdateRequests( java.lang.Iterable values) { if (updateRequestsBuilder_ == null) { ensureUpdateRequestsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, updateRequests_); onChanged(); } else { updateRequestsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public Builder clearUpdateRequests() { if (updateRequestsBuilder_ == null) { updateRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000020); onChanged(); } else { updateRequestsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public Builder removeUpdateRequests(int 
// Builder-view accessors for update_requests; this chunk ends mid-method
// (getUpdateRequestsBuilderList continues past this point).
index) { if (updateRequestsBuilder_ == null) { ensureUpdateRequestsIsMutable(); updateRequests_.remove(index); onChanged(); } else { updateRequestsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder getUpdateRequestsBuilder( int index) { return getUpdateRequestsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder getUpdateRequestsOrBuilder( int index) { if (updateRequestsBuilder_ == null) { return updateRequests_.get(index); } else { return updateRequestsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public java.util.List getUpdateRequestsOrBuilderList() { if (updateRequestsBuilder_ != null) { return updateRequestsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(updateRequests_); } } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder addUpdateRequestsBuilder() { return getUpdateRequestsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder addUpdateRequestsBuilder( int index) { return getUpdateRequestsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.UpdateContainerRequestProto update_requests = 7; */ public java.util.List getUpdateRequestsBuilderList() { return 
getUpdateRequestsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder> getUpdateRequestsFieldBuilder() { if (updateRequestsBuilder_ == null) { updateRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerRequestProtoOrBuilder>( updateRequests_, ((bitField0_ & 0x00000020) != 0), getParentForChildren(), isClean()); updateRequests_ = null; } return updateRequestsBuilder_; } private java.util.List schedulingRequests_ = java.util.Collections.emptyList(); private void ensureSchedulingRequestsIsMutable() { if (!((bitField0_ & 0x00000040) != 0)) { schedulingRequests_ = new java.util.ArrayList(schedulingRequests_); bitField0_ |= 0x00000040; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder> schedulingRequestsBuilder_; /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public java.util.List getSchedulingRequestsList() { if (schedulingRequestsBuilder_ == null) { return java.util.Collections.unmodifiableList(schedulingRequests_); } else { return schedulingRequestsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public int getSchedulingRequestsCount() { if (schedulingRequestsBuilder_ == null) { return 
schedulingRequests_.size(); } else { return schedulingRequestsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getSchedulingRequests(int index) { if (schedulingRequestsBuilder_ == null) { return schedulingRequests_.get(index); } else { return schedulingRequestsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public Builder setSchedulingRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto value) { if (schedulingRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSchedulingRequestsIsMutable(); schedulingRequests_.set(index, value); onChanged(); } else { schedulingRequestsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public Builder setSchedulingRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder builderForValue) { if (schedulingRequestsBuilder_ == null) { ensureSchedulingRequestsIsMutable(); schedulingRequests_.set(index, builderForValue.build()); onChanged(); } else { schedulingRequestsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public Builder addSchedulingRequests(org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto value) { if (schedulingRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSchedulingRequestsIsMutable(); schedulingRequests_.add(value); onChanged(); } else { schedulingRequestsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public Builder addSchedulingRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto value) { if 
(schedulingRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSchedulingRequestsIsMutable(); schedulingRequests_.add(index, value); onChanged(); } else { schedulingRequestsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public Builder addSchedulingRequests( org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder builderForValue) { if (schedulingRequestsBuilder_ == null) { ensureSchedulingRequestsIsMutable(); schedulingRequests_.add(builderForValue.build()); onChanged(); } else { schedulingRequestsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public Builder addSchedulingRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder builderForValue) { if (schedulingRequestsBuilder_ == null) { ensureSchedulingRequestsIsMutable(); schedulingRequests_.add(index, builderForValue.build()); onChanged(); } else { schedulingRequestsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public Builder addAllSchedulingRequests( java.lang.Iterable values) { if (schedulingRequestsBuilder_ == null) { ensureSchedulingRequestsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, schedulingRequests_); onChanged(); } else { schedulingRequestsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public Builder clearSchedulingRequests() { if (schedulingRequestsBuilder_ == null) { schedulingRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000040); onChanged(); } else { schedulingRequestsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.SchedulingRequestProto 
scheduling_requests = 10; */ public Builder removeSchedulingRequests(int index) { if (schedulingRequestsBuilder_ == null) { ensureSchedulingRequestsIsMutable(); schedulingRequests_.remove(index); onChanged(); } else { schedulingRequestsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder getSchedulingRequestsBuilder( int index) { return getSchedulingRequestsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder getSchedulingRequestsOrBuilder( int index) { if (schedulingRequestsBuilder_ == null) { return schedulingRequests_.get(index); } else { return schedulingRequestsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public java.util.List getSchedulingRequestsOrBuilderList() { if (schedulingRequestsBuilder_ != null) { return schedulingRequestsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(schedulingRequests_); } } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder addSchedulingRequestsBuilder() { return getSchedulingRequestsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder addSchedulingRequestsBuilder( int index) { return getSchedulingRequestsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.SchedulingRequestProto scheduling_requests = 10; */ public 
java.util.List getSchedulingRequestsBuilderList() { return getSchedulingRequestsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder> getSchedulingRequestsFieldBuilder() { if (schedulingRequestsBuilder_ == null) { schedulingRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder>( schedulingRequests_, ((bitField0_ & 0x00000040) != 0), getParentForChildren(), isClean()); schedulingRequests_ = null; } return schedulingRequestsBuilder_; } private java.lang.Object trackingUrl_ = ""; /** * optional string tracking_url = 11; */ public boolean hasTrackingUrl() { return ((bitField0_ & 0x00000080) != 0); } /** * optional string tracking_url = 11; */ public java.lang.String getTrackingUrl() { java.lang.Object ref = trackingUrl_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { trackingUrl_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string tracking_url = 11; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getTrackingUrlBytes() { java.lang.Object ref = trackingUrl_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); trackingUrl_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string tracking_url = 11; */ 
public Builder setTrackingUrl(
    java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000080;
  trackingUrl_ = value;
  onChanged();
  return this;
}
/**
 * optional string tracking_url = 11;
 */
public Builder clearTrackingUrl() {
  bitField0_ = (bitField0_ & ~0x00000080);
  trackingUrl_ = getDefaultInstance().getTrackingUrl();
  onChanged();
  return this;
}
/**
 * optional string tracking_url = 11;
 */
public Builder setTrackingUrlBytes(
    org.apache.hadoop.thirdparty.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000080;
  trackingUrl_ = value;
  onChanged();
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(
    final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:hadoop.yarn.AllocateRequestProto)
}

// @@protoc_insertion_point(class_scope:hadoop.yarn.AllocateRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto();
}

public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// NOTE(review): the <AllocateRequestProto> type arguments on Parser/AbstractParser below
// appear to have been stripped from this copy of the file (angle-bracket text eaten by
// HTML extraction). They are restored to the standard protoc output form here — confirm
// by regenerating from yarn_service_protos.proto rather than hand-editing further.
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<AllocateRequestProto>
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<AllocateRequestProto>() {
  @java.lang.Override
  public AllocateRequestProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return new AllocateRequestProto(input, extensionRegistry);
  }
};

public static org.apache.hadoop.thirdparty.protobuf.Parser<AllocateRequestProto> parser() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<AllocateRequestProto> getParserForType() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}

public interface NMTokenProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.NMTokenProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /**
   * optional .hadoop.yarn.NodeIdProto nodeId = 1;
   */
  boolean hasNodeId();
  /**
   * optional .hadoop.yarn.NodeIdProto nodeId = 1;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId();
  /**
   * optional .hadoop.yarn.NodeIdProto nodeId = 1;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder();

  /**
   * optional .hadoop.common.TokenProto token = 2;
   */
  boolean hasToken();
  /**
   * optional .hadoop.common.TokenProto token = 2;
   */
  org.apache.hadoop.security.proto.SecurityProtos.TokenProto getToken();
  /**
   * optional .hadoop.common.TokenProto token = 2;
   */
  org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getTokenOrBuilder();
}

/**
 * Protobuf type {@code hadoop.yarn.NMTokenProto}
 */
public static final class NMTokenProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.NMTokenProto)
    NMTokenProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use NMTokenProto.newBuilder() to construct.
private NMTokenProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private NMTokenProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private NMTokenProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = nodeId_.toBuilder(); } nodeId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(nodeId_); nodeId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder subBuilder = null; if (((bitField0_ & 0x00000002) != 0)) { subBuilder = token_.toBuilder(); } token_ = input.readMessage(org.apache.hadoop.security.proto.SecurityProtos.TokenProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(token_); token_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_NMTokenProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_NMTokenProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder.class); } private int bitField0_; public static final int NODEID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_; /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public boolean hasNodeId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() { return nodeId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } public static final int TOKEN_FIELD_NUMBER = 2; private org.apache.hadoop.security.proto.SecurityProtos.TokenProto token_; /** * optional .hadoop.common.TokenProto token = 2; */ public boolean hasToken() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.common.TokenProto token = 2; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getToken() { return token_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : token_; } /** * optional .hadoop.common.TokenProto token = 2; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getTokenOrBuilder() { return token_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : token_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasToken()) { if (!getToken().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getNodeId()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getToken()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getNodeId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getToken()); } size += unknownFields.getSerializedSize(); memoizedSize = 
size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto) obj; if (hasNodeId() != other.hasNodeId()) return false; if (hasNodeId()) { if (!getNodeId() .equals(other.getNodeId())) return false; } if (hasToken() != other.hasToken()) return false; if (hasToken()) { if (!getToken() .equals(other.getToken())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasNodeId()) { hash = (37 * hash) + NODEID_FIELD_NUMBER; hash = (53 * hash) + getNodeId().hashCode(); } if (hasToken()) { hash = (37 * hash) + TOKEN_FIELD_NUMBER; hash = (53 * hash) + getToken().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 
.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.NMTokenProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.NMTokenProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_NMTokenProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_NMTokenProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getNodeIdFieldBuilder(); getTokenFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (nodeIdBuilder_ == null) { nodeId_ = null; } else { nodeIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (tokenBuilder_ == null) { token_ = null; } else { tokenBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return 
this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_NMTokenProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (nodeIdBuilder_ == null) { result.nodeId_ = nodeId_; } else { result.nodeId_ = nodeIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { if (tokenBuilder_ == null) { result.token_ = token_; } else { result.token_ = tokenBuilder_.build(); } to_bitField0_ |= 0x00000002; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return 
super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.getDefaultInstance()) return this; if (other.hasNodeId()) { mergeNodeId(other.getNodeId()); } if (other.hasToken()) { mergeToken(other.getToken()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasToken()) { if (!getToken().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; 
private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_; /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public boolean hasNodeId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() { if (nodeIdBuilder_ == null) { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } else { return nodeIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public Builder setNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) { if (nodeIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } nodeId_ = value; onChanged(); } else { nodeIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public Builder setNodeId( org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) { if (nodeIdBuilder_ == null) { nodeId_ = builderForValue.build(); onChanged(); } else { nodeIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public Builder mergeNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) { if (nodeIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && nodeId_ != null && nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) { nodeId_ = org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.newBuilder(nodeId_).mergeFrom(value).buildPartial(); } else { nodeId_ = value; } onChanged(); } else { nodeIdBuilder_.mergeFrom(value); } bitField0_ |= 
0x00000001; return this; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public Builder clearNodeId() { if (nodeIdBuilder_ == null) { nodeId_ = null; onChanged(); } else { nodeIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getNodeIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() { if (nodeIdBuilder_ != null) { return nodeIdBuilder_.getMessageOrBuilder(); } else { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> getNodeIdFieldBuilder() { if (nodeIdBuilder_ == null) { nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>( getNodeId(), getParentForChildren(), isClean()); nodeId_ = null; } return nodeIdBuilder_; } private org.apache.hadoop.security.proto.SecurityProtos.TokenProto token_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> tokenBuilder_; /** * optional .hadoop.common.TokenProto token = 2; */ public boolean hasToken() { return ((bitField0_ & 
0x00000002) != 0); } /** * optional .hadoop.common.TokenProto token = 2; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getToken() { if (tokenBuilder_ == null) { return token_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : token_; } else { return tokenBuilder_.getMessage(); } } /** * optional .hadoop.common.TokenProto token = 2; */ public Builder setToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (tokenBuilder_ == null) { if (value == null) { throw new NullPointerException(); } token_ = value; onChanged(); } else { tokenBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.common.TokenProto token = 2; */ public Builder setToken( org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) { if (tokenBuilder_ == null) { token_ = builderForValue.build(); onChanged(); } else { tokenBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.common.TokenProto token = 2; */ public Builder mergeToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (tokenBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && token_ != null && token_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) { token_ = org.apache.hadoop.security.proto.SecurityProtos.TokenProto.newBuilder(token_).mergeFrom(value).buildPartial(); } else { token_ = value; } onChanged(); } else { tokenBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.common.TokenProto token = 2; */ public Builder clearToken() { if (tokenBuilder_ == null) { token_ = null; onChanged(); } else { tokenBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * optional .hadoop.common.TokenProto token = 2; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getTokenBuilder() { 
bitField0_ |= 0x00000002; onChanged(); return getTokenFieldBuilder().getBuilder(); } /** * optional .hadoop.common.TokenProto token = 2; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getTokenOrBuilder() { if (tokenBuilder_ != null) { return tokenBuilder_.getMessageOrBuilder(); } else { return token_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : token_; } } /** * optional .hadoop.common.TokenProto token = 2; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> getTokenFieldBuilder() { if (tokenBuilder_ == null) { tokenBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>( getToken(), getParentForChildren(), isClean()); token_ = null; } return tokenBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.NMTokenProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.NMTokenProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getDefaultInstance() { return 
// NOTE(review): protoc-generated code (file header: "DO NOT EDIT"). Comments below were
// added during review and will be lost on regeneration. Generic type arguments (e.g.
// Parser<NMTokenProto>) appear stripped by the HTML extraction this copy came from;
// raw types are kept byte-identical here — TODO confirm against the pristine source.
// --- tail of NMTokenProto: deprecated singleton PARSER and its accessors ---
DEFAULT_INSTANCE;
  }

  @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
      PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
    @java.lang.Override
    public NMTokenProto parsePartialFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      // Delegates to the parsing constructor of NMTokenProto.
      return new NMTokenProto(input, extensionRegistry);
    }
  };

  public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}

// Read-only view of an UpdatedContainerProto message or builder.
// Fields (from yarn_service_protos.proto):
//   required ContainerUpdateTypeProto update_type = 1;
//   required ContainerProto container = 2;
public interface UpdatedContainerProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdatedContainerProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /**
   * required .hadoop.yarn.ContainerUpdateTypeProto update_type = 1;
   */
  boolean hasUpdateType();
  /**
   * required .hadoop.yarn.ContainerUpdateTypeProto update_type = 1;
   */
  org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto getUpdateType();

  /**
   * required .hadoop.yarn.ContainerProto container = 2;
   */
  boolean hasContainer();
  /**
   * required .hadoop.yarn.ContainerProto container = 2;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainer();
  /**
   * required .hadoop.yarn.ContainerProto container = 2;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainerOrBuilder();
}
/**
 * Protobuf type {@code hadoop.yarn.UpdatedContainerProto}
 */
public static final class UpdatedContainerProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.UpdatedContainerProto)
    UpdatedContainerProtoOrBuilder {
  private static
final long serialVersionUID = 0L;
    // Use UpdatedContainerProto.newBuilder() to construct.
    private UpdatedContainerProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) {
      super(builder);
    }
    // No-arg constructor: initializes update_type to its first enum value (0).
    private UpdatedContainerProto() {
      updateType_ = 0;
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: consumes tag/value pairs until end of input
    // (readTag() == 0). Unknown fields are preserved in unknownFields.
    private UpdatedContainerProto(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      int mutable_bitField0_ = 0;
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: { // field 1 (update_type), varint wire type
              int rawValue = input.readEnum();
                @SuppressWarnings("deprecation")
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto value = org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.valueOf(rawValue);
              if (value == null) {
                // Unrecognized enum number: kept in unknownFields rather than dropped.
                unknownFields.mergeVarintField(1, rawValue);
              } else {
                bitField0_ |= 0x00000001;
                updateType_ = rawValue;
              }
              break;
            }
            case 18: { // field 2 (container), length-delimited wire type
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) != 0)) {
                // Field seen before: merge the new occurrence into the previous one.
                subBuilder = container_.toBuilder();
              }
              container_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(container_);
                container_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
            default: {
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch
 (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        // Always freeze unknown fields/extensions, even on parse failure.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdatedContainerProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdatedContainerProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder.class);
    }

    // Presence bits: 0x1 = update_type set, 0x2 = container set.
    private int bitField0_;
    public static final int UPDATE_TYPE_FIELD_NUMBER = 1;
    private int updateType_;
    /**
     * required .hadoop.yarn.ContainerUpdateTypeProto update_type = 1;
     */
    public boolean hasUpdateType() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * required .hadoop.yarn.ContainerUpdateTypeProto update_type = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto getUpdateType() {
      @SuppressWarnings("deprecation")
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto result = org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.valueOf(updateType_);
      // Falls back to INCREASE_RESOURCE (enum value 0) for unrecognized numbers.
      return result == null ?
 org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.INCREASE_RESOURCE : result;
    }

    public static final int CONTAINER_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto container_;
    /**
     * required .hadoop.yarn.ContainerProto container = 2;
     */
    public boolean hasContainer() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * required .hadoop.yarn.ContainerProto container = 2;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainer() {
      return container_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance() : container_;
    }
    /**
     * required .hadoop.yarn.ContainerProto container = 2;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainerOrBuilder() {
      return container_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance() : container_;
    }

    // Memoized initialization check: -1 = unknown, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      // Both fields are 'required' in the .proto, and the nested container
      // must itself be initialized.
      if (!hasUpdateType()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasContainer()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getContainer().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeEnum(1, updateType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getContainer());
      }
      unknownFields.writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(1, updateType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
 size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getContainer());
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    // Value equality: compares presence bits, field values, and unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto) obj;

      if (hasUpdateType() != other.hasUpdateType()) return false;
      if (hasUpdateType()) {
        if (updateType_ != other.updateType_) return false;
      }
      if (hasContainer() != other.hasContainer()) return false;
      if (hasContainer()) {
        if (!getContainer()
            .equals(other.getContainer())) return false;
      }
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasUpdateType()) {
        hash = (37 * hash) + UPDATE_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + updateType_;
      }
      if (hasContainer()) {
        hash = (37 * hash) + CONTAINER_FIELD_NUMBER;
        hash = (53 * hash) + getContainer().hashCode();
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse helpers, one per input type; all delegate to PARSER.
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static
 org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    // Stream variants wrap protobuf parse errors via parseWithIOException.
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      // Avoids an unnecessary mergeFrom when building from the default instance.
      return this == DEFAULT_INSTANCE ?
 new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.UpdatedContainerProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdatedContainerProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdatedContainerProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdatedContainerProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getContainerFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        updateType_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        if (containerBuilder_ == null) {
          container_ = null;
        } else {
          containerBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return
 this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdatedContainerProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.getDefaultInstance();
      }

      // build() enforces required-field initialization; buildPartial() does not.
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          to_bitField0_ |= 0x00000001;
        }
        result.updateType_ = updateType_;
        if (((from_bitField0_ & 0x00000002) != 0)) {
          if (containerBuilder_ == null) {
            result.container_ = container_;
          } else {
            result.container_ = containerBuilder_.build();
          }
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return
 super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-wise merge: only fields present on 'other' overwrite this builder.
      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.getDefaultInstance()) return this;
        if (other.hasUpdateType()) {
          setUpdateType(other.getUpdateType());
        }
        if (other.hasContainer()) {
          mergeContainer(other.getContainer());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasUpdateType()) {
          return false;
        }
        if (!hasContainer()) {
          return false;
        }
        if (!getContainer().isInitialized()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow as IOException.
          parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto) e.getUnfinishedMessage();
 throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits: 0x1 = update_type set, 0x2 = container set.
      private int bitField0_;

      private int updateType_ = 0;
      /**
       * required .hadoop.yarn.ContainerUpdateTypeProto update_type = 1;
       */
      public boolean hasUpdateType() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * required .hadoop.yarn.ContainerUpdateTypeProto update_type = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto getUpdateType() {
        @SuppressWarnings("deprecation")
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto result = org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.valueOf(updateType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto.INCREASE_RESOURCE : result;
      }
      /**
       * required .hadoop.yarn.ContainerUpdateTypeProto update_type = 1;
       */
      public Builder setUpdateType(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        updateType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * required .hadoop.yarn.ContainerUpdateTypeProto update_type = 1;
       */
      public Builder clearUpdateType() {
        bitField0_ = (bitField0_ & ~0x00000001);
        updateType_ = 0;
        onChanged();
        return this;
      }

      // container_ holds the plain message until a nested builder is requested;
      // after that, containerBuilder_ owns the value.
      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto container_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> containerBuilder_;
      /**
       * required .hadoop.yarn.ContainerProto container = 2;
       */
      public boolean hasContainer() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * required .hadoop.yarn.ContainerProto container = 2;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainer() {
        if (containerBuilder_ == null) {
 return container_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance() : container_;
        } else {
          return containerBuilder_.getMessage();
        }
      }
      /**
       * required .hadoop.yarn.ContainerProto container = 2;
       */
      public Builder setContainer(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
        if (containerBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          container_ = value;
          onChanged();
        } else {
          containerBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * required .hadoop.yarn.ContainerProto container = 2;
       */
      public Builder setContainer(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) {
        if (containerBuilder_ == null) {
          container_ = builderForValue.build();
          onChanged();
        } else {
          containerBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * required .hadoop.yarn.ContainerProto container = 2;
       */
      public Builder mergeContainer(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) {
        if (containerBuilder_ == null) {
          // Merge only when an existing non-default value is present; otherwise replace.
          if (((bitField0_ & 0x00000002) != 0) &&
              container_ != null &&
              container_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance()) {
            container_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.newBuilder(container_).mergeFrom(value).buildPartial();
          } else {
            container_ = value;
          }
          onChanged();
        } else {
          containerBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * required .hadoop.yarn.ContainerProto container = 2;
       */
      public Builder clearContainer() {
        if (containerBuilder_ == null) {
          container_ = null;
          onChanged();
        } else {
          containerBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * required .hadoop.yarn.ContainerProto container = 2;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder getContainerBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getContainerFieldBuilder().getBuilder();
      }
      /**
 * required .hadoop.yarn.ContainerProto container = 2;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainerOrBuilder() {
        if (containerBuilder_ != null) {
          return containerBuilder_.getMessageOrBuilder();
        } else {
          return container_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance() : container_;
        }
      }
      /**
       * required .hadoop.yarn.ContainerProto container = 2;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
          getContainerFieldBuilder() {
        if (containerBuilder_ == null) {
          // Lazily created; from here on the field is managed via the nested builder.
          containerBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>(
                  getContainer(),
                  getParentForChildren(),
                  isClean());
          container_ = null;
        }
        return containerBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdatedContainerProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.UpdatedContainerProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
// FIX(review): generic type arguments were stripped from this copy (HTML-tag
// removal mangled every single-line <...>); restored here to match protoc output:
// Parser<UpdatedContainerProto> / AbstractParser<UpdatedContainerProto> and the
// typed List returns in the interface below. No behavior change.
//
// Parser plumbing for UpdatedContainerProto. PARSER is @Deprecated in this
// protobuf generation; callers should use parser() instead.
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UpdatedContainerProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UpdatedContainerProto>() {
      @java.lang.Override
      public UpdatedContainerProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        return new UpdatedContainerProto(input, extensionRegistry);
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<UpdatedContainerProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<UpdatedContainerProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  /**
   * Read-only view of {@code hadoop.yarn.AllocateResponseProto}: implemented by
   * both the message and its Builder.
   */
  public interface AllocateResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.AllocateResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional .hadoop.yarn.AMCommandProto a_m_command = 1;
     */
    boolean hasAMCommand();
    /**
     * optional .hadoop.yarn.AMCommandProto a_m_command = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto getAMCommand();

    /**
     * optional int32 response_id = 2;
     */
    boolean hasResponseId();
    /**
     * optional int32 response_id = 2;
     */
    int getResponseId();

    /**
     * repeated .hadoop.yarn.ContainerProto allocated_containers = 3;
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto>
        getAllocatedContainersList();
    /**
     * repeated .hadoop.yarn.ContainerProto allocated_containers = 3;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getAllocatedContainers(int index);
    /**
     * repeated .hadoop.yarn.ContainerProto allocated_containers = 3;
     */
    int getAllocatedContainersCount();
    /**
     * repeated .hadoop.yarn.ContainerProto allocated_containers = 3;
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
        getAllocatedContainersOrBuilderList();
    /**
     * repeated .hadoop.yarn.ContainerProto allocated_containers = 3;
     */
// FIX(review): restored the generic type arguments that were stripped from this
// copy (HTML-tag mangling left raw java.util.List returns). Repeated-field
// accessors follow the standard protoc pattern: List<T> getXList(),
// T getX(int), int getXCount(), List<? extends TOrBuilder> getXOrBuilderList(),
// TOrBuilder getXOrBuilder(int). No behavior change.
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getAllocatedContainersOrBuilder(
        int index);

    /** repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto>
        getCompletedContainerStatusesList();
    /** repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getCompletedContainerStatuses(int index);
    /** repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */
    int getCompletedContainerStatusesCount();
    /** repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder>
        getCompletedContainerStatusesOrBuilderList();
    /** repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder getCompletedContainerStatusesOrBuilder(
        int index);

    /** optional .hadoop.yarn.ResourceProto limit = 5; */
    boolean hasLimit();
    /** optional .hadoop.yarn.ResourceProto limit = 5; */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getLimit();
    /** optional .hadoop.yarn.ResourceProto limit = 5; */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getLimitOrBuilder();

    /** repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto>
        getUpdatedNodesList();
    /** repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getUpdatedNodes(int index);
    /** repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */
    int getUpdatedNodesCount();
    /** repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder>
        getUpdatedNodesOrBuilderList();
    /** repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder getUpdatedNodesOrBuilder(
        int index);

    /** optional int32 num_cluster_nodes = 7; */
    boolean hasNumClusterNodes();
    /** optional int32 num_cluster_nodes = 7; */
    int getNumClusterNodes();

    /** optional .hadoop.yarn.PreemptionMessageProto preempt = 8; */
    boolean hasPreempt();
    /** optional .hadoop.yarn.PreemptionMessageProto preempt = 8; */
    org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto getPreempt();
    /** optional .hadoop.yarn.PreemptionMessageProto preempt = 8; */
    org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProtoOrBuilder getPreemptOrBuilder();

    /** repeated .hadoop.yarn.NMTokenProto nm_tokens = 9; */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto>
        getNmTokensList();
    /** repeated .hadoop.yarn.NMTokenProto nm_tokens = 9; */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getNmTokens(int index);
    /** repeated .hadoop.yarn.NMTokenProto nm_tokens = 9; */
    int getNmTokensCount();
    /** repeated .hadoop.yarn.NMTokenProto nm_tokens = 9; */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder>
        getNmTokensOrBuilderList();
    /** repeated .hadoop.yarn.NMTokenProto nm_tokens = 9; */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder getNmTokensOrBuilder(
        int index);

    /** optional .hadoop.common.TokenProto am_rm_token = 12; */
    boolean hasAmRmToken();
    /** optional .hadoop.common.TokenProto am_rm_token = 12; */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProto getAmRmToken();
    /** optional .hadoop.common.TokenProto am_rm_token = 12; */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getAmRmTokenOrBuilder();

    /** optional .hadoop.yarn.PriorityProto application_priority = 13; */
    boolean hasApplicationPriority();
    /** optional .hadoop.yarn.PriorityProto application_priority = 13; */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getApplicationPriority();
    /** optional .hadoop.yarn.PriorityProto application_priority = 13; */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder();

    /** optional .hadoop.yarn.CollectorInfoProto collector_info = 14; */
    boolean hasCollectorInfo();
    /** optional .hadoop.yarn.CollectorInfoProto collector_info = 14; */
    org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto getCollectorInfo();
    /** optional .hadoop.yarn.CollectorInfoProto collector_info = 14; */
    org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProtoOrBuilder getCollectorInfoOrBuilder();

    /** repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto>
        getUpdateErrorsList();
    /** repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto getUpdateErrors(int index);
    /** repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */
    int getUpdateErrorsCount();
    /** repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder>
        getUpdateErrorsOrBuilderList();
    /** repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder getUpdateErrorsOrBuilder(
        int index);

    /** repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */
    java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto>
        getUpdatedContainersList();
    /** repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto getUpdatedContainers(int index);
    /** repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */
    int getUpdatedContainersCount();
    /** repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder>
        getUpdatedContainersOrBuilderList();
    /** repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */
    org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder getUpdatedContainersOrBuilder(
        int index);

    /** repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto>
        getContainersFromPreviousAttemptsList();
    /** repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersFromPreviousAttempts(int index);
    /** repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */
    int getContainersFromPreviousAttemptsCount();
    /** repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>
        getContainersFromPreviousAttemptsOrBuilderList();
    /** repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersFromPreviousAttemptsOrBuilder(
        int index);

    /** repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto>
        getRejectedSchedulingRequestsList();
    /** repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */
    org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto getRejectedSchedulingRequests(int index);
    /** repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */
    int getRejectedSchedulingRequestsCount();
    /** repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder>
        getRejectedSchedulingRequestsOrBuilderList();
    /** repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */
    org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder getRejectedSchedulingRequestsOrBuilder(
        int index);
  }

  /**
   * Protobuf type {@code hadoop.yarn.AllocateResponseProto}
   */
  public static final class AllocateResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.AllocateResponseProto)
      AllocateResponseProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use AllocateResponseProto.newBuilder() to construct.
private AllocateResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private AllocateResponseProto() { aMCommand_ = 1; allocatedContainers_ = java.util.Collections.emptyList(); completedContainerStatuses_ = java.util.Collections.emptyList(); updatedNodes_ = java.util.Collections.emptyList(); nmTokens_ = java.util.Collections.emptyList(); updateErrors_ = java.util.Collections.emptyList(); updatedContainers_ = java.util.Collections.emptyList(); containersFromPreviousAttempts_ = java.util.Collections.emptyList(); rejectedSchedulingRequests_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private AllocateResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int rawValue = input.readEnum(); @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto value = org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; aMCommand_ = rawValue; } break; } case 16: { bitField0_ |= 0x00000002; responseId_ = input.readInt32(); break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) != 0)) { allocatedContainers_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000004; } allocatedContainers_.add( 
input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.PARSER, extensionRegistry)); break; } case 34: { if (!((mutable_bitField0_ & 0x00000008) != 0)) { completedContainerStatuses_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000008; } completedContainerStatuses_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.PARSER, extensionRegistry)); break; } case 42: { org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder subBuilder = null; if (((bitField0_ & 0x00000004) != 0)) { subBuilder = limit_.toBuilder(); } limit_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(limit_); limit_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000004; break; } case 50: { if (!((mutable_bitField0_ & 0x00000020) != 0)) { updatedNodes_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000020; } updatedNodes_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.PARSER, extensionRegistry)); break; } case 56: { bitField0_ |= 0x00000008; numClusterNodes_ = input.readInt32(); break; } case 66: { org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.Builder subBuilder = null; if (((bitField0_ & 0x00000010) != 0)) { subBuilder = preempt_.toBuilder(); } preempt_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(preempt_); preempt_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000010; break; } case 74: { if (!((mutable_bitField0_ & 0x00000100) != 0)) { nmTokens_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000100; } nmTokens_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.PARSER, extensionRegistry)); break; } case 98: { org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder subBuilder = null; if (((bitField0_ & 0x00000020) 
!= 0)) { subBuilder = amRmToken_.toBuilder(); } amRmToken_ = input.readMessage(org.apache.hadoop.security.proto.SecurityProtos.TokenProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(amRmToken_); amRmToken_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000020; break; } case 106: { org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder subBuilder = null; if (((bitField0_ & 0x00000040) != 0)) { subBuilder = applicationPriority_.toBuilder(); } applicationPriority_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(applicationPriority_); applicationPriority_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000040; break; } case 114: { org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.Builder subBuilder = null; if (((bitField0_ & 0x00000080) != 0)) { subBuilder = collectorInfo_.toBuilder(); } collectorInfo_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(collectorInfo_); collectorInfo_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000080; break; } case 122: { if (!((mutable_bitField0_ & 0x00001000) != 0)) { updateErrors_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00001000; } updateErrors_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.PARSER, extensionRegistry)); break; } case 130: { if (!((mutable_bitField0_ & 0x00002000) != 0)) { updatedContainers_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00002000; } updatedContainers_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.PARSER, extensionRegistry)); break; } case 138: { if (!((mutable_bitField0_ & 0x00004000) != 0)) { containersFromPreviousAttempts_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00004000; } containersFromPreviousAttempts_.add( 
input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.PARSER, extensionRegistry)); break; } case 146: { if (!((mutable_bitField0_ & 0x00008000) != 0)) { rejectedSchedulingRequests_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00008000; } rejectedSchedulingRequests_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) != 0)) { allocatedContainers_ = java.util.Collections.unmodifiableList(allocatedContainers_); } if (((mutable_bitField0_ & 0x00000008) != 0)) { completedContainerStatuses_ = java.util.Collections.unmodifiableList(completedContainerStatuses_); } if (((mutable_bitField0_ & 0x00000020) != 0)) { updatedNodes_ = java.util.Collections.unmodifiableList(updatedNodes_); } if (((mutable_bitField0_ & 0x00000100) != 0)) { nmTokens_ = java.util.Collections.unmodifiableList(nmTokens_); } if (((mutable_bitField0_ & 0x00001000) != 0)) { updateErrors_ = java.util.Collections.unmodifiableList(updateErrors_); } if (((mutable_bitField0_ & 0x00002000) != 0)) { updatedContainers_ = java.util.Collections.unmodifiableList(updatedContainers_); } if (((mutable_bitField0_ & 0x00004000) != 0)) { containersFromPreviousAttempts_ = java.util.Collections.unmodifiableList(containersFromPreviousAttempts_); } if (((mutable_bitField0_ & 0x00008000) != 0)) { rejectedSchedulingRequests_ = java.util.Collections.unmodifiableList(rejectedSchedulingRequests_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final 
org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.Builder.class); } private int bitField0_; public static final int A_M_COMMAND_FIELD_NUMBER = 1; private int aMCommand_; /** * optional .hadoop.yarn.AMCommandProto a_m_command = 1; */ public boolean hasAMCommand() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.AMCommandProto a_m_command = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto getAMCommand() { @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto result = org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto.valueOf(aMCommand_); return result == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto.AM_RESYNC : result; } public static final int RESPONSE_ID_FIELD_NUMBER = 2; private int responseId_; /** * optional int32 response_id = 2; */ public boolean hasResponseId() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int32 response_id = 2; */ public int getResponseId() { return responseId_; } public static final int ALLOCATED_CONTAINERS_FIELD_NUMBER = 3; private java.util.List allocatedContainers_; /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public java.util.List getAllocatedContainersList() { return allocatedContainers_; } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public java.util.List getAllocatedContainersOrBuilderList() { return allocatedContainers_; } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public int getAllocatedContainersCount() { return allocatedContainers_.size(); } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getAllocatedContainers(int index) { return allocatedContainers_.get(index); } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getAllocatedContainersOrBuilder( int index) { return allocatedContainers_.get(index); } public static final int COMPLETED_CONTAINER_STATUSES_FIELD_NUMBER = 4; private java.util.List completedContainerStatuses_; /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public java.util.List getCompletedContainerStatusesList() { return completedContainerStatuses_; } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public java.util.List getCompletedContainerStatusesOrBuilderList() { return completedContainerStatuses_; } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public int 
getCompletedContainerStatusesCount() { return completedContainerStatuses_.size(); } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getCompletedContainerStatuses(int index) { return completedContainerStatuses_.get(index); } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder getCompletedContainerStatusesOrBuilder( int index) { return completedContainerStatuses_.get(index); } public static final int LIMIT_FIELD_NUMBER = 5; private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto limit_; /** * optional .hadoop.yarn.ResourceProto limit = 5; */ public boolean hasLimit() { return ((bitField0_ & 0x00000004) != 0); } /** * optional .hadoop.yarn.ResourceProto limit = 5; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getLimit() { return limit_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : limit_; } /** * optional .hadoop.yarn.ResourceProto limit = 5; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getLimitOrBuilder() { return limit_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : limit_; } public static final int UPDATED_NODES_FIELD_NUMBER = 6; private java.util.List updatedNodes_; /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public java.util.List getUpdatedNodesList() { return updatedNodes_; } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public java.util.List getUpdatedNodesOrBuilderList() { return updatedNodes_; } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public int getUpdatedNodesCount() { return updatedNodes_.size(); } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getUpdatedNodes(int index) { return updatedNodes_.get(index); } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder getUpdatedNodesOrBuilder( int index) { return updatedNodes_.get(index); } public static final int NUM_CLUSTER_NODES_FIELD_NUMBER = 7; private int numClusterNodes_; /** * optional int32 num_cluster_nodes = 7; */ public boolean hasNumClusterNodes() { return ((bitField0_ & 0x00000008) != 0); } /** * optional int32 num_cluster_nodes = 7; */ public int getNumClusterNodes() { return numClusterNodes_; } public static final int PREEMPT_FIELD_NUMBER = 8; private org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto preempt_; /** * optional .hadoop.yarn.PreemptionMessageProto preempt = 8; */ public boolean hasPreempt() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.PreemptionMessageProto preempt = 8; */ public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto getPreempt() { return preempt_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.getDefaultInstance() : preempt_; } /** * optional .hadoop.yarn.PreemptionMessageProto preempt = 8; */ public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProtoOrBuilder getPreemptOrBuilder() { return preempt_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.getDefaultInstance() : preempt_; } public static final int NM_TOKENS_FIELD_NUMBER = 9; private java.util.List nmTokens_; /** * repeated .hadoop.yarn.NMTokenProto nm_tokens = 9; */ public java.util.List getNmTokensList() { return nmTokens_; } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens = 9; */ public java.util.List getNmTokensOrBuilderList() { return nmTokens_; } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens = 9; */ public int getNmTokensCount() { return nmTokens_.size(); } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens = 9; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getNmTokens(int index) { return nmTokens_.get(index); } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens = 9; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder getNmTokensOrBuilder( int index) { return nmTokens_.get(index); } public static final int AM_RM_TOKEN_FIELD_NUMBER = 12; private org.apache.hadoop.security.proto.SecurityProtos.TokenProto amRmToken_; /** * optional .hadoop.common.TokenProto am_rm_token = 12; */ public boolean hasAmRmToken() { return ((bitField0_ & 0x00000020) != 0); } /** * optional .hadoop.common.TokenProto am_rm_token = 12; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getAmRmToken() { return amRmToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : amRmToken_; } /** * optional .hadoop.common.TokenProto am_rm_token = 12; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getAmRmTokenOrBuilder() { return amRmToken_ == null ? 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : amRmToken_; }
// ===========================================================================
// NOTE(review): This file is generated by protoc ("DO NOT EDIT") from
// yarn_service_protos.proto. This particular copy has also been damaged by
// HTML extraction: every generic type argument has been stripped, leaving raw
// types (e.g. "java.util.List updateErrors_" was originally
// "java.util.List<UpdateContainerErrorProto> updateErrors_" -- TODO confirm
// against a regenerated file). Do not hand-patch; regenerate from the .proto.
// Below: standard generated accessors for AllocateResponseProto fields 13-18.
// ===========================================================================
// --- optional .hadoop.yarn.PriorityProto application_priority = 13 ---
// Presence is tracked by bit 0x40 of bitField0_; getters fall back to the
// type's default instance when the field was never set.
public static final int APPLICATION_PRIORITY_FIELD_NUMBER = 13; private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto applicationPriority_; /** * optional .hadoop.yarn.PriorityProto application_priority = 13; */ public boolean hasApplicationPriority() { return ((bitField0_ & 0x00000040) != 0); } /** * optional .hadoop.yarn.PriorityProto application_priority = 13; */ public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getApplicationPriority() { return applicationPriority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_; } /** * optional .hadoop.yarn.PriorityProto application_priority = 13; */ public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder() { return applicationPriority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_; }
// --- optional .hadoop.yarn.CollectorInfoProto collector_info = 14 (presence bit 0x80) ---
public static final int COLLECTOR_INFO_FIELD_NUMBER = 14; private org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto collectorInfo_; /** * optional .hadoop.yarn.CollectorInfoProto collector_info = 14; */ public boolean hasCollectorInfo() { return ((bitField0_ & 0x00000080) != 0); } /** * optional .hadoop.yarn.CollectorInfoProto collector_info = 14; */ public org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto getCollectorInfo() { return collectorInfo_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.getDefaultInstance() : collectorInfo_; } /** * optional .hadoop.yarn.CollectorInfoProto collector_info = 14; */ public org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProtoOrBuilder getCollectorInfoOrBuilder() { return collectorInfo_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.getDefaultInstance() : collectorInfo_; }
// --- repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15 ---
// Repeated fields have no presence bit; the backing list is empty-by-default.
// NOTE(review): raw java.util.List below is extraction damage, not protoc output.
public static final int UPDATE_ERRORS_FIELD_NUMBER = 15; private java.util.List updateErrors_; /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public java.util.List getUpdateErrorsList() { return updateErrors_; } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public java.util.List getUpdateErrorsOrBuilderList() { return updateErrors_; } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public int getUpdateErrorsCount() { return updateErrors_.size(); } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto getUpdateErrors(int index) { return updateErrors_.get(index); } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder getUpdateErrorsOrBuilder( int index) { return updateErrors_.get(index); }
// --- repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16 ---
public static final int UPDATED_CONTAINERS_FIELD_NUMBER = 16; private java.util.List updatedContainers_; /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public java.util.List getUpdatedContainersList() { return updatedContainers_; } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public java.util.List getUpdatedContainersOrBuilderList() { return updatedContainers_; } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public int getUpdatedContainersCount() { return updatedContainers_.size(); } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto getUpdatedContainers(int index) { return updatedContainers_.get(index); } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */
public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder getUpdatedContainersOrBuilder( int index) { return updatedContainers_.get(index); }
// --- repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17 ---
public static final int CONTAINERS_FROM_PREVIOUS_ATTEMPTS_FIELD_NUMBER = 17; private java.util.List containersFromPreviousAttempts_; /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public java.util.List getContainersFromPreviousAttemptsList() { return containersFromPreviousAttempts_; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public java.util.List getContainersFromPreviousAttemptsOrBuilderList() { return containersFromPreviousAttempts_; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public int getContainersFromPreviousAttemptsCount() { return containersFromPreviousAttempts_.size(); } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersFromPreviousAttempts(int index) { return containersFromPreviousAttempts_.get(index); } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersFromPreviousAttemptsOrBuilder( int index) { return containersFromPreviousAttempts_.get(index); }
// --- repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18 ---
public static final int REJECTED_SCHEDULING_REQUESTS_FIELD_NUMBER = 18; private java.util.List rejectedSchedulingRequests_; /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public java.util.List getRejectedSchedulingRequestsList() { return rejectedSchedulingRequests_; } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public java.util.List getRejectedSchedulingRequestsOrBuilderList() { return rejectedSchedulingRequests_; } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto
rejected_scheduling_requests = 18; */ public int getRejectedSchedulingRequestsCount() { return rejectedSchedulingRequests_.size(); } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto getRejectedSchedulingRequests(int index) { return rejectedSchedulingRequests_.get(index); } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder getRejectedSchedulingRequestsOrBuilder( int index) { return rejectedSchedulingRequests_.get(index); }
// NOTE(review): protoc-generated serialization plumbing for
// AllocateResponseProto -- do not hand-edit; regenerate from the .proto file.
// isInitialized(): recursively checks required sub-fields of every set/repeated
// message field; result memoized in memoizedIsInitialized (-1 = not computed,
// 0 = false, 1 = true).
private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getAllocatedContainersCount(); i++) { if (!getAllocatedContainers(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getCompletedContainerStatusesCount(); i++) { if (!getCompletedContainerStatuses(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasLimit()) { if (!getLimit().isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getUpdatedNodesCount(); i++) { if (!getUpdatedNodes(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasPreempt()) { if (!getPreempt().isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getNmTokensCount(); i++) { if (!getNmTokens(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasAmRmToken()) { if (!getAmRmToken().isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasCollectorInfo()) { if (!getCollectorInfo().isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getUpdateErrorsCount(); i++) { if
(!getUpdateErrors(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getUpdatedContainersCount(); i++) { if (!getUpdatedContainers(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getContainersFromPreviousAttemptsCount(); i++) { if (!getContainersFromPreviousAttempts(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getRejectedSchedulingRequestsCount(); i++) { if (!getRejectedSchedulingRequests(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; }
// writeTo(): emits fields in tag order (1..18; note 10/11 are absent from this
// message), gated on the matching bitField0_ presence bit for optional fields;
// trailing unknown fields are preserved for forward compatibility.
@java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeEnum(1, aMCommand_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt32(2, responseId_); } for (int i = 0; i < allocatedContainers_.size(); i++) { output.writeMessage(3, allocatedContainers_.get(i)); } for (int i = 0; i < completedContainerStatuses_.size(); i++) { output.writeMessage(4, completedContainerStatuses_.get(i)); } if (((bitField0_ & 0x00000004) != 0)) { output.writeMessage(5, getLimit()); } for (int i = 0; i < updatedNodes_.size(); i++) { output.writeMessage(6, updatedNodes_.get(i)); } if (((bitField0_ & 0x00000008) != 0)) { output.writeInt32(7, numClusterNodes_); } if (((bitField0_ & 0x00000010) != 0)) { output.writeMessage(8, getPreempt()); } for (int i = 0; i < nmTokens_.size(); i++) { output.writeMessage(9, nmTokens_.get(i)); } if (((bitField0_ & 0x00000020) != 0)) { output.writeMessage(12, getAmRmToken()); } if (((bitField0_ & 0x00000040) != 0)) { output.writeMessage(13, getApplicationPriority()); } if (((bitField0_ & 0x00000080) != 0)) { output.writeMessage(14, getCollectorInfo()); } for (int i = 0; i < updateErrors_.size(); i++) { output.writeMessage(15, updateErrors_.get(i)); } for (int i = 0; i < updatedContainers_.size(); i++) {
output.writeMessage(16, updatedContainers_.get(i)); } for (int i = 0; i < containersFromPreviousAttempts_.size(); i++) { output.writeMessage(17, containersFromPreviousAttempts_.get(i)); } for (int i = 0; i < rejectedSchedulingRequests_.size(); i++) { output.writeMessage(18, rejectedSchedulingRequests_.get(i)); } unknownFields.writeTo(output); }
// getSerializedSize(): mirrors writeTo() field-for-field; memoized in
// memoizedSize (-1 = not yet computed), so it must stay in sync with writeTo.
@java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(1, aMCommand_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(2, responseId_); } for (int i = 0; i < allocatedContainers_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(3, allocatedContainers_.get(i)); } for (int i = 0; i < completedContainerStatuses_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(4, completedContainerStatuses_.get(i)); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(5, getLimit()); } for (int i = 0; i < updatedNodes_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(6, updatedNodes_.get(i)); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(7, numClusterNodes_); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(8, getPreempt()); } for (int i = 0; i < nmTokens_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(9, nmTokens_.get(i)); } if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(12, getAmRmToken());
} if (((bitField0_ & 0x00000040) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(13, getApplicationPriority()); } if (((bitField0_ & 0x00000080) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(14, getCollectorInfo()); } for (int i = 0; i < updateErrors_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(15, updateErrors_.get(i)); } for (int i = 0; i < updatedContainers_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(16, updatedContainers_.get(i)); } for (int i = 0; i < containersFromPreviousAttempts_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(17, containersFromPreviousAttempts_.get(i)); } for (int i = 0; i < rejectedSchedulingRequests_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(18, rejectedSchedulingRequests_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; }
// equals(): field-by-field structural equality -- presence flags must match
// before values are compared; unknown fields participate in equality.
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto) obj; if (hasAMCommand() != other.hasAMCommand()) return false; if (hasAMCommand()) { if (aMCommand_ != other.aMCommand_) return false; } if (hasResponseId() != other.hasResponseId()) return false; if (hasResponseId()) { if (getResponseId() != other.getResponseId()) return false; } if (!getAllocatedContainersList() .equals(other.getAllocatedContainersList())) return false; if (!getCompletedContainerStatusesList() .equals(other.getCompletedContainerStatusesList())) return false; if (hasLimit() !=
other.hasLimit()) return false; if (hasLimit()) { if (!getLimit() .equals(other.getLimit())) return false; } if (!getUpdatedNodesList() .equals(other.getUpdatedNodesList())) return false; if (hasNumClusterNodes() != other.hasNumClusterNodes()) return false; if (hasNumClusterNodes()) { if (getNumClusterNodes() != other.getNumClusterNodes()) return false; } if (hasPreempt() != other.hasPreempt()) return false; if (hasPreempt()) { if (!getPreempt() .equals(other.getPreempt())) return false; } if (!getNmTokensList() .equals(other.getNmTokensList())) return false; if (hasAmRmToken() != other.hasAmRmToken()) return false; if (hasAmRmToken()) { if (!getAmRmToken() .equals(other.getAmRmToken())) return false; } if (hasApplicationPriority() != other.hasApplicationPriority()) return false; if (hasApplicationPriority()) { if (!getApplicationPriority() .equals(other.getApplicationPriority())) return false; } if (hasCollectorInfo() != other.hasCollectorInfo()) return false; if (hasCollectorInfo()) { if (!getCollectorInfo() .equals(other.getCollectorInfo())) return false; } if (!getUpdateErrorsList() .equals(other.getUpdateErrorsList())) return false; if (!getUpdatedContainersList() .equals(other.getUpdatedContainersList())) return false; if (!getContainersFromPreviousAttemptsList() .equals(other.getContainersFromPreviousAttemptsList())) return false; if (!getRejectedSchedulingRequestsList() .equals(other.getRejectedSchedulingRequestsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; }
// hashCode(): consistent with equals() above -- folds each *set* field (and
// each non-empty repeated field) into the hash using the protoc 19/37/53
// mixing scheme, keyed by field number; memoized in memoizedHashCode.
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasAMCommand()) { hash = (37 * hash) + A_M_COMMAND_FIELD_NUMBER; hash = (53 * hash) + aMCommand_; } if (hasResponseId()) { hash = (37 * hash) + RESPONSE_ID_FIELD_NUMBER; hash = (53 * hash) + getResponseId(); } if (getAllocatedContainersCount() > 0) { hash = (37 * hash) +
ALLOCATED_CONTAINERS_FIELD_NUMBER; hash = (53 * hash) + getAllocatedContainersList().hashCode(); } if (getCompletedContainerStatusesCount() > 0) { hash = (37 * hash) + COMPLETED_CONTAINER_STATUSES_FIELD_NUMBER; hash = (53 * hash) + getCompletedContainerStatusesList().hashCode(); } if (hasLimit()) { hash = (37 * hash) + LIMIT_FIELD_NUMBER; hash = (53 * hash) + getLimit().hashCode(); } if (getUpdatedNodesCount() > 0) { hash = (37 * hash) + UPDATED_NODES_FIELD_NUMBER; hash = (53 * hash) + getUpdatedNodesList().hashCode(); } if (hasNumClusterNodes()) { hash = (37 * hash) + NUM_CLUSTER_NODES_FIELD_NUMBER; hash = (53 * hash) + getNumClusterNodes(); } if (hasPreempt()) { hash = (37 * hash) + PREEMPT_FIELD_NUMBER; hash = (53 * hash) + getPreempt().hashCode(); } if (getNmTokensCount() > 0) { hash = (37 * hash) + NM_TOKENS_FIELD_NUMBER; hash = (53 * hash) + getNmTokensList().hashCode(); } if (hasAmRmToken()) { hash = (37 * hash) + AM_RM_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getAmRmToken().hashCode(); } if (hasApplicationPriority()) { hash = (37 * hash) + APPLICATION_PRIORITY_FIELD_NUMBER; hash = (53 * hash) + getApplicationPriority().hashCode(); } if (hasCollectorInfo()) { hash = (37 * hash) + COLLECTOR_INFO_FIELD_NUMBER; hash = (53 * hash) + getCollectorInfo().hashCode(); } if (getUpdateErrorsCount() > 0) { hash = (37 * hash) + UPDATE_ERRORS_FIELD_NUMBER; hash = (53 * hash) + getUpdateErrorsList().hashCode(); } if (getUpdatedContainersCount() > 0) { hash = (37 * hash) + UPDATED_CONTAINERS_FIELD_NUMBER; hash = (53 * hash) + getUpdatedContainersList().hashCode(); } if (getContainersFromPreviousAttemptsCount() > 0) { hash = (37 * hash) + CONTAINERS_FROM_PREVIOUS_ATTEMPTS_FIELD_NUMBER; hash = (53 * hash) + getContainersFromPreviousAttemptsList().hashCode(); } if (getRejectedSchedulingRequestsCount() > 0) { hash = (37 * hash) + REJECTED_SCHEDULING_REQUESTS_FIELD_NUMBER; hash = (53 * hash) + getRejectedSchedulingRequestsList().hashCode(); } hash = (29 * hash) +
unknownFields.hashCode(); memoizedHashCode = hash; return hash; }
// Standard generated parseFrom()/parseDelimitedFrom() overloads: all delegate
// to the static PARSER (or the GeneratedMessageV3 IO helpers for streams).
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); }
// Builder factory methods: newBuilder(prototype) pre-populates from an
// existing message; toBuilder() (below, next region) round-trips this instance.
@java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto prototype) { return
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; }
// ---------------------------------------------------------------------------
// NOTE(review): Builder for AllocateResponseProto (protoc-generated; do not
// hand-edit). Extraction damage: the original extends
// GeneratedMessageV3.Builder<Builder> -- the type argument was stripped here.
// Each optional field has a presence bit in the Builder's own bitField0_
// (numbered differently from the message's), and each message/repeated field
// may be backed either by a plain field or by a lazily-created field builder.
// ---------------------------------------------------------------------------
/** * Protobuf type {@code hadoop.yarn.AllocateResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.AllocateResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getAllocatedContainersFieldBuilder(); getCompletedContainerStatusesFieldBuilder(); getLimitFieldBuilder(); getUpdatedNodesFieldBuilder();
getPreemptFieldBuilder(); getNmTokensFieldBuilder(); getAmRmTokenFieldBuilder(); getApplicationPriorityFieldBuilder(); getCollectorInfoFieldBuilder(); getUpdateErrorsFieldBuilder(); getUpdatedContainersFieldBuilder(); getContainersFromPreviousAttemptsFieldBuilder(); getRejectedSchedulingRequestsFieldBuilder(); } }
// clear(): resets every field to its proto default (aMCommand_ = 1 is the
// first enum value's wire number) and clears all Builder presence bits.
@java.lang.Override public Builder clear() { super.clear(); aMCommand_ = 1; bitField0_ = (bitField0_ & ~0x00000001); responseId_ = 0; bitField0_ = (bitField0_ & ~0x00000002); if (allocatedContainersBuilder_ == null) { allocatedContainers_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); } else { allocatedContainersBuilder_.clear(); } if (completedContainerStatusesBuilder_ == null) { completedContainerStatuses_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); } else { completedContainerStatusesBuilder_.clear(); } if (limitBuilder_ == null) { limit_ = null; } else { limitBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); if (updatedNodesBuilder_ == null) { updatedNodes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000020); } else { updatedNodesBuilder_.clear(); } numClusterNodes_ = 0; bitField0_ = (bitField0_ & ~0x00000040); if (preemptBuilder_ == null) { preempt_ = null; } else { preemptBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000080); if (nmTokensBuilder_ == null) { nmTokens_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000100); } else { nmTokensBuilder_.clear(); } if (amRmTokenBuilder_ == null) { amRmToken_ = null; } else { amRmTokenBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000200); if (applicationPriorityBuilder_ == null) { applicationPriority_ = null; } else { applicationPriorityBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000400); if (collectorInfoBuilder_ == null) { collectorInfo_ = null; } else { collectorInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000800); if (updateErrorsBuilder_ == null) {
updateErrors_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00001000); } else { updateErrorsBuilder_.clear(); } if (updatedContainersBuilder_ == null) { updatedContainers_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00002000); } else { updatedContainersBuilder_.clear(); } if (containersFromPreviousAttemptsBuilder_ == null) { containersFromPreviousAttempts_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00004000); } else { containersFromPreviousAttemptsBuilder_.clear(); } if (rejectedSchedulingRequestsBuilder_ == null) { rejectedSchedulingRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00008000); } else { rejectedSchedulingRequestsBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_AllocateResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; }
// buildPartial(): copies Builder state into a new message without the
// isInitialized() check; repeated lists are frozen via unmodifiableList and
// handed over (the Builder drops its presence bit so it re-copies on reuse);
// Builder bits are remapped to the message's packed to_bitField0_ layout.
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.aMCommand_ = aMCommand_; if (((from_bitField0_ & 0x00000002) != 0))
{ result.responseId_ = responseId_; to_bitField0_ |= 0x00000002; } if (allocatedContainersBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0)) { allocatedContainers_ = java.util.Collections.unmodifiableList(allocatedContainers_); bitField0_ = (bitField0_ & ~0x00000004); } result.allocatedContainers_ = allocatedContainers_; } else { result.allocatedContainers_ = allocatedContainersBuilder_.build(); } if (completedContainerStatusesBuilder_ == null) { if (((bitField0_ & 0x00000008) != 0)) { completedContainerStatuses_ = java.util.Collections.unmodifiableList(completedContainerStatuses_); bitField0_ = (bitField0_ & ~0x00000008); } result.completedContainerStatuses_ = completedContainerStatuses_; } else { result.completedContainerStatuses_ = completedContainerStatusesBuilder_.build(); } if (((from_bitField0_ & 0x00000010) != 0)) { if (limitBuilder_ == null) { result.limit_ = limit_; } else { result.limit_ = limitBuilder_.build(); } to_bitField0_ |= 0x00000004; } if (updatedNodesBuilder_ == null) { if (((bitField0_ & 0x00000020) != 0)) { updatedNodes_ = java.util.Collections.unmodifiableList(updatedNodes_); bitField0_ = (bitField0_ & ~0x00000020); } result.updatedNodes_ = updatedNodes_; } else { result.updatedNodes_ = updatedNodesBuilder_.build(); } if (((from_bitField0_ & 0x00000040) != 0)) { result.numClusterNodes_ = numClusterNodes_; to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000080) != 0)) { if (preemptBuilder_ == null) { result.preempt_ = preempt_; } else { result.preempt_ = preemptBuilder_.build(); } to_bitField0_ |= 0x00000010; } if (nmTokensBuilder_ == null) { if (((bitField0_ & 0x00000100) != 0)) { nmTokens_ = java.util.Collections.unmodifiableList(nmTokens_); bitField0_ = (bitField0_ & ~0x00000100); } result.nmTokens_ = nmTokens_; } else { result.nmTokens_ = nmTokensBuilder_.build(); } if (((from_bitField0_ & 0x00000200) != 0)) { if (amRmTokenBuilder_ == null) { result.amRmToken_ = amRmToken_; } else { result.amRmToken_ =
amRmTokenBuilder_.build(); } to_bitField0_ |= 0x00000020; } if (((from_bitField0_ & 0x00000400) != 0)) { if (applicationPriorityBuilder_ == null) { result.applicationPriority_ = applicationPriority_; } else { result.applicationPriority_ = applicationPriorityBuilder_.build(); } to_bitField0_ |= 0x00000040; } if (((from_bitField0_ & 0x00000800) != 0)) { if (collectorInfoBuilder_ == null) { result.collectorInfo_ = collectorInfo_; } else { result.collectorInfo_ = collectorInfoBuilder_.build(); } to_bitField0_ |= 0x00000080; } if (updateErrorsBuilder_ == null) { if (((bitField0_ & 0x00001000) != 0)) { updateErrors_ = java.util.Collections.unmodifiableList(updateErrors_); bitField0_ = (bitField0_ & ~0x00001000); } result.updateErrors_ = updateErrors_; } else { result.updateErrors_ = updateErrorsBuilder_.build(); } if (updatedContainersBuilder_ == null) { if (((bitField0_ & 0x00002000) != 0)) { updatedContainers_ = java.util.Collections.unmodifiableList(updatedContainers_); bitField0_ = (bitField0_ & ~0x00002000); } result.updatedContainers_ = updatedContainers_; } else { result.updatedContainers_ = updatedContainersBuilder_.build(); } if (containersFromPreviousAttemptsBuilder_ == null) { if (((bitField0_ & 0x00004000) != 0)) { containersFromPreviousAttempts_ = java.util.Collections.unmodifiableList(containersFromPreviousAttempts_); bitField0_ = (bitField0_ & ~0x00004000); } result.containersFromPreviousAttempts_ = containersFromPreviousAttempts_; } else { result.containersFromPreviousAttempts_ = containersFromPreviousAttemptsBuilder_.build(); } if (rejectedSchedulingRequestsBuilder_ == null) { if (((bitField0_ & 0x00008000) != 0)) { rejectedSchedulingRequests_ = java.util.Collections.unmodifiableList(rejectedSchedulingRequests_); bitField0_ = (bitField0_ & ~0x00008000); } result.rejectedSchedulingRequests_ = rejectedSchedulingRequests_; } else { result.rejectedSchedulingRequests_ = rejectedSchedulingRequestsBuilder_.build(); } result.bitField0_ = to_bitField0_;
onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); }
// mergeFrom(): standard protoc merge -- scalars/singular messages overwrite
// or merge when set in 'other'; repeated fields are concatenated. When a
// repeated field has no field builder yet, lists are shared/appended
// directly; otherwise the field builder absorbs the other side's messages.
// (Method is truncated at the end of this view.)
@java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.getDefaultInstance()) return this; if (other.hasAMCommand()) { setAMCommand(other.getAMCommand()); } if (other.hasResponseId()) { setResponseId(other.getResponseId()); } if (allocatedContainersBuilder_ == null) { if (!other.allocatedContainers_.isEmpty()) { if (allocatedContainers_.isEmpty()) { allocatedContainers_ = other.allocatedContainers_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureAllocatedContainersIsMutable();
allocatedContainers_.addAll(other.allocatedContainers_); } onChanged(); } } else { if (!other.allocatedContainers_.isEmpty()) { if (allocatedContainersBuilder_.isEmpty()) { allocatedContainersBuilder_.dispose(); allocatedContainersBuilder_ = null; allocatedContainers_ = other.allocatedContainers_; bitField0_ = (bitField0_ & ~0x00000004); allocatedContainersBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getAllocatedContainersFieldBuilder() : null; } else { allocatedContainersBuilder_.addAllMessages(other.allocatedContainers_); } } } if (completedContainerStatusesBuilder_ == null) { if (!other.completedContainerStatuses_.isEmpty()) { if (completedContainerStatuses_.isEmpty()) { completedContainerStatuses_ = other.completedContainerStatuses_; bitField0_ = (bitField0_ & ~0x00000008); } else { ensureCompletedContainerStatusesIsMutable(); completedContainerStatuses_.addAll(other.completedContainerStatuses_); } onChanged(); } } else { if (!other.completedContainerStatuses_.isEmpty()) { if (completedContainerStatusesBuilder_.isEmpty()) { completedContainerStatusesBuilder_.dispose(); completedContainerStatusesBuilder_ = null; completedContainerStatuses_ = other.completedContainerStatuses_; bitField0_ = (bitField0_ & ~0x00000008); completedContainerStatusesBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getCompletedContainerStatusesFieldBuilder() : null; } else { completedContainerStatusesBuilder_.addAllMessages(other.completedContainerStatuses_); } } } if (other.hasLimit()) { mergeLimit(other.getLimit()); } if (updatedNodesBuilder_ == null) { if (!other.updatedNodes_.isEmpty()) { if (updatedNodes_.isEmpty()) { updatedNodes_ = other.updatedNodes_; bitField0_ = (bitField0_ & ~0x00000020); } else { ensureUpdatedNodesIsMutable(); updatedNodes_.addAll(other.updatedNodes_); } onChanged(); } } else { if (!other.updatedNodes_.isEmpty()) { if (updatedNodesBuilder_.isEmpty()) { updatedNodesBuilder_.dispose(); updatedNodesBuilder_ = null; updatedNodes_ = other.updatedNodes_; bitField0_ = (bitField0_ & ~0x00000020); updatedNodesBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getUpdatedNodesFieldBuilder() : null; } else { updatedNodesBuilder_.addAllMessages(other.updatedNodes_); } } } if (other.hasNumClusterNodes()) { setNumClusterNodes(other.getNumClusterNodes()); } if (other.hasPreempt()) { mergePreempt(other.getPreempt()); } if (nmTokensBuilder_ == null) { if (!other.nmTokens_.isEmpty()) { if (nmTokens_.isEmpty()) { nmTokens_ = other.nmTokens_; bitField0_ = (bitField0_ & ~0x00000100); } else { ensureNmTokensIsMutable(); nmTokens_.addAll(other.nmTokens_); } onChanged(); } } else { if (!other.nmTokens_.isEmpty()) { if (nmTokensBuilder_.isEmpty()) { nmTokensBuilder_.dispose(); nmTokensBuilder_ = null; nmTokens_ = other.nmTokens_; bitField0_ = (bitField0_ & ~0x00000100); nmTokensBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getNmTokensFieldBuilder() : null; } else { nmTokensBuilder_.addAllMessages(other.nmTokens_); } } } if (other.hasAmRmToken()) { mergeAmRmToken(other.getAmRmToken()); } if (other.hasApplicationPriority()) { mergeApplicationPriority(other.getApplicationPriority()); } if (other.hasCollectorInfo()) { mergeCollectorInfo(other.getCollectorInfo()); } if (updateErrorsBuilder_ == null) { if (!other.updateErrors_.isEmpty()) { if (updateErrors_.isEmpty()) { updateErrors_ = other.updateErrors_; bitField0_ = (bitField0_ & ~0x00001000); } else { ensureUpdateErrorsIsMutable(); updateErrors_.addAll(other.updateErrors_); } onChanged(); } } else { if (!other.updateErrors_.isEmpty()) { if (updateErrorsBuilder_.isEmpty()) { updateErrorsBuilder_.dispose(); updateErrorsBuilder_ = null; updateErrors_ = other.updateErrors_; bitField0_ = (bitField0_ & ~0x00001000); updateErrorsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getUpdateErrorsFieldBuilder() : null; } else { updateErrorsBuilder_.addAllMessages(other.updateErrors_); } } } if (updatedContainersBuilder_ == null) { if (!other.updatedContainers_.isEmpty()) { if (updatedContainers_.isEmpty()) { updatedContainers_ = other.updatedContainers_; bitField0_ = (bitField0_ & ~0x00002000); } else { ensureUpdatedContainersIsMutable(); updatedContainers_.addAll(other.updatedContainers_); } onChanged(); } } else { if (!other.updatedContainers_.isEmpty()) { if (updatedContainersBuilder_.isEmpty()) { updatedContainersBuilder_.dispose(); updatedContainersBuilder_ = null; updatedContainers_ = other.updatedContainers_; bitField0_ = (bitField0_ & ~0x00002000); updatedContainersBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getUpdatedContainersFieldBuilder() : null; } else { updatedContainersBuilder_.addAllMessages(other.updatedContainers_); } } } if (containersFromPreviousAttemptsBuilder_ == null) { if (!other.containersFromPreviousAttempts_.isEmpty()) { if (containersFromPreviousAttempts_.isEmpty()) { containersFromPreviousAttempts_ = other.containersFromPreviousAttempts_; bitField0_ = (bitField0_ & ~0x00004000); } else { ensureContainersFromPreviousAttemptsIsMutable(); containersFromPreviousAttempts_.addAll(other.containersFromPreviousAttempts_); } onChanged(); } } else { if (!other.containersFromPreviousAttempts_.isEmpty()) { if (containersFromPreviousAttemptsBuilder_.isEmpty()) { containersFromPreviousAttemptsBuilder_.dispose(); containersFromPreviousAttemptsBuilder_ = null; containersFromPreviousAttempts_ = other.containersFromPreviousAttempts_; bitField0_ = (bitField0_ & ~0x00004000); containersFromPreviousAttemptsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getContainersFromPreviousAttemptsFieldBuilder() : null; } else { containersFromPreviousAttemptsBuilder_.addAllMessages(other.containersFromPreviousAttempts_); } } } if (rejectedSchedulingRequestsBuilder_ == null) { if (!other.rejectedSchedulingRequests_.isEmpty()) { if (rejectedSchedulingRequests_.isEmpty()) { rejectedSchedulingRequests_ = other.rejectedSchedulingRequests_; bitField0_ = (bitField0_ & ~0x00008000); } else { ensureRejectedSchedulingRequestsIsMutable(); rejectedSchedulingRequests_.addAll(other.rejectedSchedulingRequests_); } onChanged(); } } else { if (!other.rejectedSchedulingRequests_.isEmpty()) { if (rejectedSchedulingRequestsBuilder_.isEmpty()) { rejectedSchedulingRequestsBuilder_.dispose(); rejectedSchedulingRequestsBuilder_ = null; rejectedSchedulingRequests_ = other.rejectedSchedulingRequests_; bitField0_ = (bitField0_ & ~0x00008000); rejectedSchedulingRequestsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getRejectedSchedulingRequestsFieldBuilder() : null; } else { rejectedSchedulingRequestsBuilder_.addAllMessages(other.rejectedSchedulingRequests_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getAllocatedContainersCount(); i++) { if (!getAllocatedContainers(i).isInitialized()) { return false; } } for (int i = 0; i < getCompletedContainerStatusesCount(); i++) { if (!getCompletedContainerStatuses(i).isInitialized()) { return false; } } if (hasLimit()) { if (!getLimit().isInitialized()) { return false; } } for (int i = 0; i < getUpdatedNodesCount(); i++) { if (!getUpdatedNodes(i).isInitialized()) { return false; } } if (hasPreempt()) { if (!getPreempt().isInitialized()) { return false; } } for (int i = 0; i < getNmTokensCount(); i++) { if (!getNmTokens(i).isInitialized()) { return false; } } if (hasAmRmToken()) { if (!getAmRmToken().isInitialized()) { return false; } } if (hasCollectorInfo()) { if (!getCollectorInfo().isInitialized()) { return false; } } for (int i = 0; i < getUpdateErrorsCount(); i++) { if (!getUpdateErrors(i).isInitialized()) { return false; } } for (int i = 0; i < getUpdatedContainersCount(); i++) { if (!getUpdatedContainers(i).isInitialized()) { return false; } } for (int i = 0; i < getContainersFromPreviousAttemptsCount(); i++) { if (!getContainersFromPreviousAttempts(i).isInitialized()) { return false; } } for (int i = 0; i < getRejectedSchedulingRequestsCount(); i++) { if (!getRejectedSchedulingRequests(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } 
catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int aMCommand_ = 1; /** * optional .hadoop.yarn.AMCommandProto a_m_command = 1; */ public boolean hasAMCommand() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.AMCommandProto a_m_command = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto getAMCommand() { @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto result = org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto.valueOf(aMCommand_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto.AM_RESYNC : result; } /** * optional .hadoop.yarn.AMCommandProto a_m_command = 1; */ public Builder setAMCommand(org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; aMCommand_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.AMCommandProto a_m_command = 1; */ public Builder clearAMCommand() { bitField0_ = (bitField0_ & ~0x00000001); aMCommand_ = 1; onChanged(); return this; } private int responseId_ ; /** * optional int32 response_id = 2; */ public boolean hasResponseId() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int32 response_id = 2; */ public int getResponseId() { return responseId_; } /** * optional int32 response_id = 2; */ public Builder setResponseId(int value) { bitField0_ |= 0x00000002; responseId_ = value; onChanged(); return this; } /** * optional int32 response_id = 2; */ public Builder clearResponseId() { bitField0_ = (bitField0_ & ~0x00000002); responseId_ = 0; onChanged(); return this; } private java.util.List allocatedContainers_ = 
// Repeated field 'allocated_containers' (= 3, ContainerProto): backing list
// defaults to the shared immutable empty list; ensure*IsMutable performs a
// copy-on-write ArrayList copy guarded by presence bit 0x4. All accessors
// route through the RepeatedFieldBuilderV3 once it exists, otherwise through
// the plain list. Generated code — do not hand-edit.
java.util.Collections.emptyList(); private void ensureAllocatedContainersIsMutable() { if (!((bitField0_ & 0x00000004) != 0)) { allocatedContainers_ = new java.util.ArrayList(allocatedContainers_); bitField0_ |= 0x00000004; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> allocatedContainersBuilder_; /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public java.util.List getAllocatedContainersList() { if (allocatedContainersBuilder_ == null) { return java.util.Collections.unmodifiableList(allocatedContainers_); } else { return allocatedContainersBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public int getAllocatedContainersCount() { if (allocatedContainersBuilder_ == null) { return allocatedContainers_.size(); } else { return allocatedContainersBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getAllocatedContainers(int index) { if (allocatedContainersBuilder_ == null) { return allocatedContainers_.get(index); } else { return allocatedContainersBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public Builder setAllocatedContainers( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) { if (allocatedContainersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAllocatedContainersIsMutable(); allocatedContainers_.set(index, value); onChanged(); } else { allocatedContainersBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public Builder setAllocatedContainers( int index, 
// set/add variants that accept a nested ContainerProto.Builder call build()
// eagerly and store the resulting message.
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) { if (allocatedContainersBuilder_ == null) { ensureAllocatedContainersIsMutable(); allocatedContainers_.set(index, builderForValue.build()); onChanged(); } else { allocatedContainersBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public Builder addAllocatedContainers(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) { if (allocatedContainersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAllocatedContainersIsMutable(); allocatedContainers_.add(value); onChanged(); } else { allocatedContainersBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public Builder addAllocatedContainers( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) { if (allocatedContainersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAllocatedContainersIsMutable(); allocatedContainers_.add(index, value); onChanged(); } else { allocatedContainersBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public Builder addAllocatedContainers( org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) { if (allocatedContainersBuilder_ == null) { ensureAllocatedContainersIsMutable(); allocatedContainers_.add(builderForValue.build()); onChanged(); } else { allocatedContainersBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public Builder addAllocatedContainers( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) { if (allocatedContainersBuilder_ == null) { ensureAllocatedContainersIsMutable(); allocatedContainers_.add(index, builderForValue.build()); 
// addAll / clear / remove plus per-index Builder and OrBuilder views; the
// *Builder accessors force creation of the field builder.
onChanged(); } else { allocatedContainersBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public Builder addAllAllocatedContainers( java.lang.Iterable values) { if (allocatedContainersBuilder_ == null) { ensureAllocatedContainersIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, allocatedContainers_); onChanged(); } else { allocatedContainersBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public Builder clearAllocatedContainers() { if (allocatedContainersBuilder_ == null) { allocatedContainers_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { allocatedContainersBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public Builder removeAllocatedContainers(int index) { if (allocatedContainersBuilder_ == null) { ensureAllocatedContainersIsMutable(); allocatedContainers_.remove(index); onChanged(); } else { allocatedContainersBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder getAllocatedContainersBuilder( int index) { return getAllocatedContainersFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getAllocatedContainersOrBuilder( int index) { if (allocatedContainersBuilder_ == null) { return allocatedContainers_.get(index); } else { return allocatedContainersBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public java.util.List getAllocatedContainersOrBuilderList() { if (allocatedContainersBuilder_ != null) { return 
// getAllocatedContainersFieldBuilder() lazily builds the RepeatedFieldBuilderV3
// from the current list, then nulls the list so the builder becomes the sole
// owner of the field's data.
allocatedContainersBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(allocatedContainers_); } } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addAllocatedContainersBuilder() { return getAllocatedContainersFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addAllocatedContainersBuilder( int index) { return getAllocatedContainersFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerProto allocated_containers = 3; */ public java.util.List getAllocatedContainersBuilderList() { return getAllocatedContainersFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> getAllocatedContainersFieldBuilder() { if (allocatedContainersBuilder_ == null) { allocatedContainersBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>( allocatedContainers_, ((bitField0_ & 0x00000004) != 0), getParentForChildren(), isClean()); allocatedContainers_ = null; } return allocatedContainersBuilder_; } private java.util.List completedContainerStatuses_ = java.util.Collections.emptyList(); private void ensureCompletedContainerStatusesIsMutable() { if (!((bitField0_ & 0x00000008) != 0)) { completedContainerStatuses_ = new 
// Repeated field 'completed_container_statuses' (= 4, ContainerStatusProto):
// same generated copy-on-write pattern as allocated_containers, guarded by
// presence bit 0x8. Generated code — do not hand-edit.
java.util.ArrayList(completedContainerStatuses_); bitField0_ |= 0x00000008; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder> completedContainerStatusesBuilder_; /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public java.util.List getCompletedContainerStatusesList() { if (completedContainerStatusesBuilder_ == null) { return java.util.Collections.unmodifiableList(completedContainerStatuses_); } else { return completedContainerStatusesBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public int getCompletedContainerStatusesCount() { if (completedContainerStatusesBuilder_ == null) { return completedContainerStatuses_.size(); } else { return completedContainerStatusesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getCompletedContainerStatuses(int index) { if (completedContainerStatusesBuilder_ == null) { return completedContainerStatuses_.get(index); } else { return completedContainerStatusesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public Builder setCompletedContainerStatuses( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto value) { if (completedContainerStatusesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCompletedContainerStatusesIsMutable(); completedContainerStatuses_.set(index, value); onChanged(); } else { completedContainerStatusesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ 
// set/add variants: message values are null-checked; Builder arguments are
// built eagerly before being stored.
public Builder setCompletedContainerStatuses( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder builderForValue) { if (completedContainerStatusesBuilder_ == null) { ensureCompletedContainerStatusesIsMutable(); completedContainerStatuses_.set(index, builderForValue.build()); onChanged(); } else { completedContainerStatusesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public Builder addCompletedContainerStatuses(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto value) { if (completedContainerStatusesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCompletedContainerStatusesIsMutable(); completedContainerStatuses_.add(value); onChanged(); } else { completedContainerStatusesBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public Builder addCompletedContainerStatuses( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto value) { if (completedContainerStatusesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCompletedContainerStatusesIsMutable(); completedContainerStatuses_.add(index, value); onChanged(); } else { completedContainerStatusesBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public Builder addCompletedContainerStatuses( org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder builderForValue) { if (completedContainerStatusesBuilder_ == null) { ensureCompletedContainerStatusesIsMutable(); completedContainerStatuses_.add(builderForValue.build()); onChanged(); } else { completedContainerStatusesBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public 
// addAll / clear / remove plus per-index Builder accessors.
Builder addCompletedContainerStatuses( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder builderForValue) { if (completedContainerStatusesBuilder_ == null) { ensureCompletedContainerStatusesIsMutable(); completedContainerStatuses_.add(index, builderForValue.build()); onChanged(); } else { completedContainerStatusesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public Builder addAllCompletedContainerStatuses( java.lang.Iterable values) { if (completedContainerStatusesBuilder_ == null) { ensureCompletedContainerStatusesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, completedContainerStatuses_); onChanged(); } else { completedContainerStatusesBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public Builder clearCompletedContainerStatuses() { if (completedContainerStatusesBuilder_ == null) { completedContainerStatuses_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); } else { completedContainerStatusesBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public Builder removeCompletedContainerStatuses(int index) { if (completedContainerStatusesBuilder_ == null) { ensureCompletedContainerStatusesIsMutable(); completedContainerStatuses_.remove(index); onChanged(); } else { completedContainerStatusesBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder getCompletedContainerStatusesBuilder( int index) { return getCompletedContainerStatusesFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 
4; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder getCompletedContainerStatusesOrBuilder( int index) { if (completedContainerStatusesBuilder_ == null) { return completedContainerStatuses_.get(index); } else { return completedContainerStatusesBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public java.util.List getCompletedContainerStatusesOrBuilderList() { if (completedContainerStatusesBuilder_ != null) { return completedContainerStatusesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(completedContainerStatuses_); } } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder addCompletedContainerStatusesBuilder() { return getCompletedContainerStatusesFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder addCompletedContainerStatusesBuilder( int index) { return getCompletedContainerStatusesFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerStatusProto completed_container_statuses = 4; */ public java.util.List getCompletedContainerStatusesBuilderList() { return getCompletedContainerStatusesFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder> getCompletedContainerStatusesFieldBuilder() { if (completedContainerStatusesBuilder_ == null) { 
// Lazily create the field builder and hand it ownership of the list; then the
// singular 'limit' field (= 5, ResourceProto, presence bit 0x10) begins,
// backed by limit_ or a SingleFieldBuilderV3 once one exists.
completedContainerStatusesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder>( completedContainerStatuses_, ((bitField0_ & 0x00000008) != 0), getParentForChildren(), isClean()); completedContainerStatuses_ = null; } return completedContainerStatusesBuilder_; } private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto limit_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> limitBuilder_; /** * optional .hadoop.yarn.ResourceProto limit = 5; */ public boolean hasLimit() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.ResourceProto limit = 5; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getLimit() { if (limitBuilder_ == null) { return limit_ == null ? 
// Singular 'limit' (= 5) accessors: getters fall back to the default instance
// when unset; setLimit/mergeLimit/clearLimit keep presence bit 0x10 in sync;
// mergeLimit folds a new value into an existing non-default limit_ via
// newBuilder(limit_).mergeFrom(value).buildPartial(). Generated code — do not
// hand-edit.
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : limit_; } else { return limitBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceProto limit = 5; */ public Builder setLimit(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (limitBuilder_ == null) { if (value == null) { throw new NullPointerException(); } limit_ = value; onChanged(); } else { limitBuilder_.setMessage(value); } bitField0_ |= 0x00000010; return this; } /** * optional .hadoop.yarn.ResourceProto limit = 5; */ public Builder setLimit( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) { if (limitBuilder_ == null) { limit_ = builderForValue.build(); onChanged(); } else { limitBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000010; return this; } /** * optional .hadoop.yarn.ResourceProto limit = 5; */ public Builder mergeLimit(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (limitBuilder_ == null) { if (((bitField0_ & 0x00000010) != 0) && limit_ != null && limit_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) { limit_ = org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.newBuilder(limit_).mergeFrom(value).buildPartial(); } else { limit_ = value; } onChanged(); } else { limitBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; return this; } /** * optional .hadoop.yarn.ResourceProto limit = 5; */ public Builder clearLimit() { if (limitBuilder_ == null) { limit_ = null; onChanged(); } else { limitBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); return this; } /** * optional .hadoop.yarn.ResourceProto limit = 5; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getLimitBuilder() { bitField0_ |= 0x00000010; onChanged(); return getLimitFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceProto limit = 5; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getLimitOrBuilder() { if 
// getLimitFieldBuilder() lazily wraps the current value in a
// SingleFieldBuilderV3 and nulls limit_. Then repeated 'updated_nodes'
// (= 6, NodeReportProto, presence bit 0x20) begins, using the same
// copy-on-write list pattern as the other repeated fields.
(limitBuilder_ != null) { return limitBuilder_.getMessageOrBuilder(); } else { return limit_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : limit_; } } /** * optional .hadoop.yarn.ResourceProto limit = 5; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getLimitFieldBuilder() { if (limitBuilder_ == null) { limitBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>( getLimit(), getParentForChildren(), isClean()); limit_ = null; } return limitBuilder_; } private java.util.List updatedNodes_ = java.util.Collections.emptyList(); private void ensureUpdatedNodesIsMutable() { if (!((bitField0_ & 0x00000020) != 0)) { updatedNodes_ = new java.util.ArrayList(updatedNodes_); bitField0_ |= 0x00000020; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder> updatedNodesBuilder_; /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public java.util.List getUpdatedNodesList() { if (updatedNodesBuilder_ == null) { return java.util.Collections.unmodifiableList(updatedNodes_); } else { return updatedNodesBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public int getUpdatedNodesCount() { if (updatedNodesBuilder_ == null) { return updatedNodes_.size(); } else { return updatedNodesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public 
// updated_nodes get/set/add accessors (message values null-checked; Builder
// arguments built eagerly).
org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getUpdatedNodes(int index) { if (updatedNodesBuilder_ == null) { return updatedNodes_.get(index); } else { return updatedNodesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public Builder setUpdatedNodes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto value) { if (updatedNodesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUpdatedNodesIsMutable(); updatedNodes_.set(index, value); onChanged(); } else { updatedNodesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public Builder setUpdatedNodes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder builderForValue) { if (updatedNodesBuilder_ == null) { ensureUpdatedNodesIsMutable(); updatedNodes_.set(index, builderForValue.build()); onChanged(); } else { updatedNodesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public Builder addUpdatedNodes(org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto value) { if (updatedNodesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUpdatedNodesIsMutable(); updatedNodes_.add(value); onChanged(); } else { updatedNodesBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public Builder addUpdatedNodes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto value) { if (updatedNodesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUpdatedNodesIsMutable(); updatedNodes_.add(index, value); onChanged(); } else { updatedNodesBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public Builder addUpdatedNodes( 
// addAll / clear / remove plus per-index Builder accessors for updated_nodes.
org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder builderForValue) { if (updatedNodesBuilder_ == null) { ensureUpdatedNodesIsMutable(); updatedNodes_.add(builderForValue.build()); onChanged(); } else { updatedNodesBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public Builder addUpdatedNodes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder builderForValue) { if (updatedNodesBuilder_ == null) { ensureUpdatedNodesIsMutable(); updatedNodes_.add(index, builderForValue.build()); onChanged(); } else { updatedNodesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public Builder addAllUpdatedNodes( java.lang.Iterable values) { if (updatedNodesBuilder_ == null) { ensureUpdatedNodesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, updatedNodes_); onChanged(); } else { updatedNodesBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public Builder clearUpdatedNodes() { if (updatedNodesBuilder_ == null) { updatedNodes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000020); onChanged(); } else { updatedNodesBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public Builder removeUpdatedNodes(int index) { if (updatedNodesBuilder_ == null) { ensureUpdatedNodesIsMutable(); updatedNodes_.remove(index); onChanged(); } else { updatedNodesBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder getUpdatedNodesBuilder( int index) { return getUpdatedNodesFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public 
// OrBuilder views and lazy field-builder creation for updated_nodes; the
// getUpdatedNodesFieldBuilder() body continues past the end of this excerpt.
org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder getUpdatedNodesOrBuilder( int index) { if (updatedNodesBuilder_ == null) { return updatedNodes_.get(index); } else { return updatedNodesBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public java.util.List getUpdatedNodesOrBuilderList() { if (updatedNodesBuilder_ != null) { return updatedNodesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(updatedNodes_); } } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder addUpdatedNodesBuilder() { return getUpdatedNodesFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder addUpdatedNodesBuilder( int index) { return getUpdatedNodesFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeReportProto updated_nodes = 6; */ public java.util.List getUpdatedNodesBuilderList() { return getUpdatedNodesFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder> getUpdatedNodesFieldBuilder() { if (updatedNodesBuilder_ == null) { updatedNodesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder>( updatedNodes_, ((bitField0_ & 0x00000020) != 0), getParentForChildren(), isClean()); 
updatedNodes_ = null; } return updatedNodesBuilder_; } private int numClusterNodes_ ; /** * optional int32 num_cluster_nodes = 7; */ public boolean hasNumClusterNodes() { return ((bitField0_ & 0x00000040) != 0); } /** * optional int32 num_cluster_nodes = 7; */ public int getNumClusterNodes() { return numClusterNodes_; } /** * optional int32 num_cluster_nodes = 7; */ public Builder setNumClusterNodes(int value) { bitField0_ |= 0x00000040; numClusterNodes_ = value; onChanged(); return this; } /** * optional int32 num_cluster_nodes = 7; */ public Builder clearNumClusterNodes() { bitField0_ = (bitField0_ & ~0x00000040); numClusterNodes_ = 0; onChanged(); return this; } private org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto preempt_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProtoOrBuilder> preemptBuilder_; /** * optional .hadoop.yarn.PreemptionMessageProto preempt = 8; */ public boolean hasPreempt() { return ((bitField0_ & 0x00000080) != 0); } /** * optional .hadoop.yarn.PreemptionMessageProto preempt = 8; */ public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto getPreempt() { if (preemptBuilder_ == null) { return preempt_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.getDefaultInstance() : preempt_; } else { return preemptBuilder_.getMessage(); } } /** * optional .hadoop.yarn.PreemptionMessageProto preempt = 8; */ public Builder setPreempt(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto value) { if (preemptBuilder_ == null) { if (value == null) { throw new NullPointerException(); } preempt_ = value; onChanged(); } else { preemptBuilder_.setMessage(value); } bitField0_ |= 0x00000080; return this; } /** * optional .hadoop.yarn.PreemptionMessageProto preempt = 8; */ public Builder setPreempt( org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.Builder builderForValue) { if (preemptBuilder_ == null) { preempt_ = builderForValue.build(); onChanged(); } else { preemptBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000080; return this; } /** * optional .hadoop.yarn.PreemptionMessageProto preempt = 8; */ public Builder mergePreempt(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto value) { if (preemptBuilder_ == null) { if (((bitField0_ & 0x00000080) != 0) && preempt_ != null && preempt_ != org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.getDefaultInstance()) { preempt_ = org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.newBuilder(preempt_).mergeFrom(value).buildPartial(); } else { preempt_ = value; } onChanged(); } else { preemptBuilder_.mergeFrom(value); } bitField0_ |= 0x00000080; return this; } /** * optional .hadoop.yarn.PreemptionMessageProto preempt = 8; */ public Builder clearPreempt() { if (preemptBuilder_ == null) { preempt_ = null; onChanged(); } else { preemptBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000080); return this; } /** * optional .hadoop.yarn.PreemptionMessageProto preempt = 8; */ public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.Builder getPreemptBuilder() { bitField0_ |= 0x00000080; onChanged(); return 
getPreemptFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.PreemptionMessageProto preempt = 8; */ public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProtoOrBuilder getPreemptOrBuilder() { if (preemptBuilder_ != null) { return preemptBuilder_.getMessageOrBuilder(); } else { return preempt_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.getDefaultInstance() : preempt_; } } /** * optional .hadoop.yarn.PreemptionMessageProto preempt = 8; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProtoOrBuilder> getPreemptFieldBuilder() { if (preemptBuilder_ == null) { preemptBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProtoOrBuilder>( getPreempt(), getParentForChildren(), isClean()); preempt_ = null; } return preemptBuilder_; } private java.util.List nmTokens_ = java.util.Collections.emptyList(); private void ensureNmTokensIsMutable() { if (!((bitField0_ & 0x00000100) != 0)) { nmTokens_ = new java.util.ArrayList(nmTokens_); bitField0_ |= 0x00000100; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder> nmTokensBuilder_; /** * repeated .hadoop.yarn.NMTokenProto nm_tokens = 9; */ public java.util.List getNmTokensList() { if (nmTokensBuilder_ == null) { return java.util.Collections.unmodifiableList(nmTokens_); } else { return nmTokensBuilder_.getMessageList(); } } /** * repeated 
.hadoop.yarn.NMTokenProto nm_tokens = 9; */ public int getNmTokensCount() { if (nmTokensBuilder_ == null) { return nmTokens_.size(); } else { return nmTokensBuilder_.getCount(); } } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens = 9; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto getNmTokens(int index) { if (nmTokensBuilder_ == null) { return nmTokens_.get(index); } else { return nmTokensBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens = 9; */ public Builder setNmTokens( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto value) { if (nmTokensBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNmTokensIsMutable(); nmTokens_.set(index, value); onChanged(); } else { nmTokensBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens = 9; */ public Builder setNmTokens( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder builderForValue) { if (nmTokensBuilder_ == null) { ensureNmTokensIsMutable(); nmTokens_.set(index, builderForValue.build()); onChanged(); } else { nmTokensBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens = 9; */ public Builder addNmTokens(org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto value) { if (nmTokensBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNmTokensIsMutable(); nmTokens_.add(value); onChanged(); } else { nmTokensBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.NMTokenProto nm_tokens = 9; */ public Builder addNmTokens( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto value) { if (nmTokensBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNmTokensIsMutable(); nmTokens_.add(index, value); onChanged(); } else { nmTokensBuilder_.addMessage(index, value); } return this; } 
// ---------------------------------------------------------------------------
// repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;
//
// protoc-generated repeated-message-field accessors. The generic type
// arguments below were restored after being stripped by HTML extraction
// (raw `Iterable`/`List` would not compile against typed callers); they
// follow the standard protoc Java codegen shape for repeated message fields.
// Field presence is tracked in bitField0_ bit 0x00000100; once
// nmTokensBuilder_ exists, it owns the data and nmTokens_ is nulled out.
// ---------------------------------------------------------------------------

/**
 * Appends a token built from {@code builderForValue}.
 * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
 */
public Builder addNmTokens(
    org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder builderForValue) {
  if (nmTokensBuilder_ == null) {
    ensureNmTokensIsMutable();
    nmTokens_.add(builderForValue.build());
    onChanged();
  } else {
    nmTokensBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * Inserts a token built from {@code builderForValue} at {@code index}.
 * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
 */
public Builder addNmTokens(
    int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder builderForValue) {
  if (nmTokensBuilder_ == null) {
    ensureNmTokensIsMutable();
    nmTokens_.add(index, builderForValue.build());
    onChanged();
  } else {
    nmTokensBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * Appends every element of {@code values}.
 * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
 */
public Builder addAllNmTokens(
    java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto> values) {
  if (nmTokensBuilder_ == null) {
    ensureNmTokensIsMutable();
    org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
        values, nmTokens_);
    onChanged();
  } else {
    nmTokensBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * Resets the field to empty and clears its presence bit.
 * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
 */
public Builder clearNmTokens() {
  if (nmTokensBuilder_ == null) {
    nmTokens_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000100);
    onChanged();
  } else {
    nmTokensBuilder_.clear();
  }
  return this;
}
/**
 * Removes the element at {@code index}.
 * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
 */
public Builder removeNmTokens(int index) {
  if (nmTokensBuilder_ == null) {
    ensureNmTokensIsMutable();
    nmTokens_.remove(index);
    onChanged();
  } else {
    nmTokensBuilder_.remove(index);
  }
  return this;
}
/**
 * Returns a mutable sub-builder for the element at {@code index};
 * forces creation of the field builder.
 * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder getNmTokensBuilder(
    int index) {
  return getNmTokensFieldBuilder().getBuilder(index);
}
/**
 * Read-only view of the element at {@code index}.
 * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder getNmTokensOrBuilder(
    int index) {
  if (nmTokensBuilder_ == null) {
    return nmTokens_.get(index);
  } else {
    return nmTokensBuilder_.getMessageOrBuilder(index);
  }
}
/**
 * Read-only view of the whole list.
 * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
 */
public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder>
    getNmTokensOrBuilderList() {
  if (nmTokensBuilder_ != null) {
    return nmTokensBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(nmTokens_);
  }
}
/**
 * Appends a default-instance element and returns its sub-builder.
 * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder addNmTokensBuilder() {
  return getNmTokensFieldBuilder().addBuilder(
      org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.getDefaultInstance());
}
/**
 * Inserts a default-instance element at {@code index} and returns its sub-builder.
 * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
 */
public org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder addNmTokensBuilder(
    int index) {
  return getNmTokensFieldBuilder().addBuilder(
      index, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.getDefaultInstance());
}
/**
 * Sub-builders for every element; forces creation of the field builder.
 * <code>repeated .hadoop.yarn.NMTokenProto nm_tokens = 9;</code>
 */
public java.util.List<org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder>
    getNmTokensBuilderList() {
  return getNmTokensFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for nm_tokens; after creation the
// builder owns the element list and nmTokens_ is released.
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder>
    getNmTokensFieldBuilder() {
  if (nmTokensBuilder_ == null) {
    nmTokensBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProtoOrBuilder>(
            nmTokens_,
            ((bitField0_ & 0x00000100) != 0),
            getParentForChildren(),
            isClean());
    nmTokens_ = null;
  }
  return nmTokensBuilder_;
}

// optional .hadoop.common.TokenProto am_rm_token = 12; message value
// (presence bit 0x00000200, managed by the accessors that follow).
private org.apache.hadoop.security.proto.SecurityProtos.TokenProto amRmToken_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> amRmTokenBuilder_; /** * optional .hadoop.common.TokenProto am_rm_token = 12; */ public boolean hasAmRmToken() { return ((bitField0_ & 0x00000200) != 0); } /** * optional .hadoop.common.TokenProto am_rm_token = 12; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getAmRmToken() { if (amRmTokenBuilder_ == null) { return amRmToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : amRmToken_; } else { return amRmTokenBuilder_.getMessage(); } } /** * optional .hadoop.common.TokenProto am_rm_token = 12; */ public Builder setAmRmToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (amRmTokenBuilder_ == null) { if (value == null) { throw new NullPointerException(); } amRmToken_ = value; onChanged(); } else { amRmTokenBuilder_.setMessage(value); } bitField0_ |= 0x00000200; return this; } /** * optional .hadoop.common.TokenProto am_rm_token = 12; */ public Builder setAmRmToken( org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) { if (amRmTokenBuilder_ == null) { amRmToken_ = builderForValue.build(); onChanged(); } else { amRmTokenBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000200; return this; } /** * optional .hadoop.common.TokenProto am_rm_token = 12; */ public Builder mergeAmRmToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (amRmTokenBuilder_ == null) { if (((bitField0_ & 0x00000200) != 0) && amRmToken_ != null && amRmToken_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) { amRmToken_ = org.apache.hadoop.security.proto.SecurityProtos.TokenProto.newBuilder(amRmToken_).mergeFrom(value).buildPartial(); } else { 
amRmToken_ = value; } onChanged(); } else { amRmTokenBuilder_.mergeFrom(value); } bitField0_ |= 0x00000200; return this; } /** * optional .hadoop.common.TokenProto am_rm_token = 12; */ public Builder clearAmRmToken() { if (amRmTokenBuilder_ == null) { amRmToken_ = null; onChanged(); } else { amRmTokenBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000200); return this; } /** * optional .hadoop.common.TokenProto am_rm_token = 12; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getAmRmTokenBuilder() { bitField0_ |= 0x00000200; onChanged(); return getAmRmTokenFieldBuilder().getBuilder(); } /** * optional .hadoop.common.TokenProto am_rm_token = 12; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getAmRmTokenOrBuilder() { if (amRmTokenBuilder_ != null) { return amRmTokenBuilder_.getMessageOrBuilder(); } else { return amRmToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : amRmToken_; } } /** * optional .hadoop.common.TokenProto am_rm_token = 12; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> getAmRmTokenFieldBuilder() { if (amRmTokenBuilder_ == null) { amRmTokenBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>( getAmRmToken(), getParentForChildren(), isClean()); amRmToken_ = null; } return amRmTokenBuilder_; } private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto applicationPriority_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, 
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> applicationPriorityBuilder_; /** * optional .hadoop.yarn.PriorityProto application_priority = 13; */ public boolean hasApplicationPriority() { return ((bitField0_ & 0x00000400) != 0); } /** * optional .hadoop.yarn.PriorityProto application_priority = 13; */ public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getApplicationPriority() { if (applicationPriorityBuilder_ == null) { return applicationPriority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_; } else { return applicationPriorityBuilder_.getMessage(); } } /** * optional .hadoop.yarn.PriorityProto application_priority = 13; */ public Builder setApplicationPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) { if (applicationPriorityBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationPriority_ = value; onChanged(); } else { applicationPriorityBuilder_.setMessage(value); } bitField0_ |= 0x00000400; return this; } /** * optional .hadoop.yarn.PriorityProto application_priority = 13; */ public Builder setApplicationPriority( org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) { if (applicationPriorityBuilder_ == null) { applicationPriority_ = builderForValue.build(); onChanged(); } else { applicationPriorityBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000400; return this; } /** * optional .hadoop.yarn.PriorityProto application_priority = 13; */ public Builder mergeApplicationPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) { if (applicationPriorityBuilder_ == null) { if (((bitField0_ & 0x00000400) != 0) && applicationPriority_ != null && applicationPriority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) { applicationPriority_ = 
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.newBuilder(applicationPriority_).mergeFrom(value).buildPartial(); } else { applicationPriority_ = value; } onChanged(); } else { applicationPriorityBuilder_.mergeFrom(value); } bitField0_ |= 0x00000400; return this; } /** * optional .hadoop.yarn.PriorityProto application_priority = 13; */ public Builder clearApplicationPriority() { if (applicationPriorityBuilder_ == null) { applicationPriority_ = null; onChanged(); } else { applicationPriorityBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000400); return this; } /** * optional .hadoop.yarn.PriorityProto application_priority = 13; */ public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getApplicationPriorityBuilder() { bitField0_ |= 0x00000400; onChanged(); return getApplicationPriorityFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.PriorityProto application_priority = 13; */ public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder() { if (applicationPriorityBuilder_ != null) { return applicationPriorityBuilder_.getMessageOrBuilder(); } else { return applicationPriority_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_; } } /** * optional .hadoop.yarn.PriorityProto application_priority = 13; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> getApplicationPriorityFieldBuilder() { if (applicationPriorityBuilder_ == null) { applicationPriorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>( getApplicationPriority(), getParentForChildren(), isClean()); applicationPriority_ = null; } return applicationPriorityBuilder_; } private org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto collectorInfo_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProtoOrBuilder> collectorInfoBuilder_; /** * optional .hadoop.yarn.CollectorInfoProto collector_info = 14; */ public boolean hasCollectorInfo() { return ((bitField0_ & 0x00000800) != 0); } /** * optional .hadoop.yarn.CollectorInfoProto collector_info = 14; */ public org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto getCollectorInfo() { if (collectorInfoBuilder_ == null) { return collectorInfo_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.getDefaultInstance() : collectorInfo_; } else { return collectorInfoBuilder_.getMessage(); } } /** * optional .hadoop.yarn.CollectorInfoProto collector_info = 14; */ public Builder setCollectorInfo(org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto value) { if (collectorInfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } collectorInfo_ = value; onChanged(); } else { collectorInfoBuilder_.setMessage(value); } bitField0_ |= 0x00000800; return this; } /** * optional .hadoop.yarn.CollectorInfoProto collector_info = 14; */ public Builder setCollectorInfo( org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.Builder builderForValue) { if (collectorInfoBuilder_ == null) { collectorInfo_ = builderForValue.build(); onChanged(); } else { collectorInfoBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000800; return this; } /** * optional .hadoop.yarn.CollectorInfoProto collector_info = 14; */ public Builder mergeCollectorInfo(org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto value) { if (collectorInfoBuilder_ == null) { if (((bitField0_ & 0x00000800) != 0) && collectorInfo_ != null && collectorInfo_ != org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.getDefaultInstance()) { collectorInfo_ = org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.newBuilder(collectorInfo_).mergeFrom(value).buildPartial(); } else { collectorInfo_ = value; } onChanged(); } else { collectorInfoBuilder_.mergeFrom(value); } bitField0_ |= 0x00000800; return this; } /** * optional .hadoop.yarn.CollectorInfoProto collector_info = 14; */ public Builder clearCollectorInfo() { if (collectorInfoBuilder_ == null) { collectorInfo_ = null; onChanged(); } else { collectorInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000800); return this; } /** * optional .hadoop.yarn.CollectorInfoProto collector_info = 14; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.Builder getCollectorInfoBuilder() { bitField0_ |= 0x00000800; onChanged(); return getCollectorInfoFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.CollectorInfoProto collector_info = 14; */ public org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProtoOrBuilder getCollectorInfoOrBuilder() { if (collectorInfoBuilder_ != null) { return collectorInfoBuilder_.getMessageOrBuilder(); } else { return collectorInfo_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.getDefaultInstance() : collectorInfo_; } } /** * optional .hadoop.yarn.CollectorInfoProto collector_info = 14; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProtoOrBuilder> getCollectorInfoFieldBuilder() { if (collectorInfoBuilder_ == null) { collectorInfoBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProtoOrBuilder>( getCollectorInfo(), getParentForChildren(), isClean()); collectorInfo_ = null; } return collectorInfoBuilder_; } private java.util.List updateErrors_ = java.util.Collections.emptyList(); private void ensureUpdateErrorsIsMutable() { if (!((bitField0_ & 0x00001000) != 0)) { updateErrors_ = new java.util.ArrayList(updateErrors_); bitField0_ |= 0x00001000; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder> updateErrorsBuilder_; /** * repeated 
.hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public java.util.List getUpdateErrorsList() { if (updateErrorsBuilder_ == null) { return java.util.Collections.unmodifiableList(updateErrors_); } else { return updateErrorsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public int getUpdateErrorsCount() { if (updateErrorsBuilder_ == null) { return updateErrors_.size(); } else { return updateErrorsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto getUpdateErrors(int index) { if (updateErrorsBuilder_ == null) { return updateErrors_.get(index); } else { return updateErrorsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public Builder setUpdateErrors( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto value) { if (updateErrorsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUpdateErrorsIsMutable(); updateErrors_.set(index, value); onChanged(); } else { updateErrorsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public Builder setUpdateErrors( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder builderForValue) { if (updateErrorsBuilder_ == null) { ensureUpdateErrorsIsMutable(); updateErrors_.set(index, builderForValue.build()); onChanged(); } else { updateErrorsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public Builder addUpdateErrors(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto value) { if (updateErrorsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } 
ensureUpdateErrorsIsMutable(); updateErrors_.add(value); onChanged(); } else { updateErrorsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public Builder addUpdateErrors( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto value) { if (updateErrorsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUpdateErrorsIsMutable(); updateErrors_.add(index, value); onChanged(); } else { updateErrorsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public Builder addUpdateErrors( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder builderForValue) { if (updateErrorsBuilder_ == null) { ensureUpdateErrorsIsMutable(); updateErrors_.add(builderForValue.build()); onChanged(); } else { updateErrorsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public Builder addUpdateErrors( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder builderForValue) { if (updateErrorsBuilder_ == null) { ensureUpdateErrorsIsMutable(); updateErrors_.add(index, builderForValue.build()); onChanged(); } else { updateErrorsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public Builder addAllUpdateErrors( java.lang.Iterable values) { if (updateErrorsBuilder_ == null) { ensureUpdateErrorsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, updateErrors_); onChanged(); } else { updateErrorsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public Builder clearUpdateErrors() { if (updateErrorsBuilder_ == null) { updateErrors_ = 
java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00001000); onChanged(); } else { updateErrorsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public Builder removeUpdateErrors(int index) { if (updateErrorsBuilder_ == null) { ensureUpdateErrorsIsMutable(); updateErrors_.remove(index); onChanged(); } else { updateErrorsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder getUpdateErrorsBuilder( int index) { return getUpdateErrorsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder getUpdateErrorsOrBuilder( int index) { if (updateErrorsBuilder_ == null) { return updateErrors_.get(index); } else { return updateErrorsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public java.util.List getUpdateErrorsOrBuilderList() { if (updateErrorsBuilder_ != null) { return updateErrorsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(updateErrors_); } } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder addUpdateErrorsBuilder() { return getUpdateErrorsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder addUpdateErrorsBuilder( int index) { return getUpdateErrorsFieldBuilder().addBuilder( index, 
org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.UpdateContainerErrorProto update_errors = 15; */ public java.util.List getUpdateErrorsBuilderList() { return getUpdateErrorsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder> getUpdateErrorsFieldBuilder() { if (updateErrorsBuilder_ == null) { updateErrorsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateContainerErrorProtoOrBuilder>( updateErrors_, ((bitField0_ & 0x00001000) != 0), getParentForChildren(), isClean()); updateErrors_ = null; } return updateErrorsBuilder_; } private java.util.List updatedContainers_ = java.util.Collections.emptyList(); private void ensureUpdatedContainersIsMutable() { if (!((bitField0_ & 0x00002000) != 0)) { updatedContainers_ = new java.util.ArrayList(updatedContainers_); bitField0_ |= 0x00002000; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder> updatedContainersBuilder_; /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public java.util.List getUpdatedContainersList() { if (updatedContainersBuilder_ == null) { return java.util.Collections.unmodifiableList(updatedContainers_); } else { return 
// Generated accessors for repeated field 16 (updated_containers, UpdatedContainerProto):
// same dual-mode pattern -- plain list until a RepeatedFieldBuilderV3 exists, then delegation.
// NOTE(review): generated code; regenerate via protoc instead of hand-editing.
updatedContainersBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public int getUpdatedContainersCount() { if (updatedContainersBuilder_ == null) { return updatedContainers_.size(); } else { return updatedContainersBuilder_.getCount(); } } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto getUpdatedContainers(int index) { if (updatedContainersBuilder_ == null) { return updatedContainers_.get(index); } else { return updatedContainersBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public Builder setUpdatedContainers( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto value) { if (updatedContainersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUpdatedContainersIsMutable(); updatedContainers_.set(index, value); onChanged(); } else { updatedContainersBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public Builder setUpdatedContainers( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder builderForValue) { if (updatedContainersBuilder_ == null) { ensureUpdatedContainersIsMutable(); updatedContainers_.set(index, builderForValue.build()); onChanged(); } else { updatedContainersBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public Builder addUpdatedContainers(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto value) { if (updatedContainersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUpdatedContainersIsMutable(); updatedContainers_.add(value); onChanged(); } else { updatedContainersBuilder_.addMessage(value); } return 
this; } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public Builder addUpdatedContainers( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto value) { if (updatedContainersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUpdatedContainersIsMutable(); updatedContainers_.add(index, value); onChanged(); } else { updatedContainersBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public Builder addUpdatedContainers( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder builderForValue) { if (updatedContainersBuilder_ == null) { ensureUpdatedContainersIsMutable(); updatedContainers_.add(builderForValue.build()); onChanged(); } else { updatedContainersBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public Builder addUpdatedContainers( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder builderForValue) { if (updatedContainersBuilder_ == null) { ensureUpdatedContainersIsMutable(); updatedContainers_.add(index, builderForValue.build()); onChanged(); } else { updatedContainersBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public Builder addAllUpdatedContainers( java.lang.Iterable values) { if (updatedContainersBuilder_ == null) { ensureUpdatedContainersIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, updatedContainers_); onChanged(); } else { updatedContainersBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public Builder clearUpdatedContainers() { if (updatedContainersBuilder_ == null) { updatedContainers_ = 
// clearUpdatedContainers() resets field 16 and clears presence bit 0x00002000.
java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00002000); onChanged(); } else { updatedContainersBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public Builder removeUpdatedContainers(int index) { if (updatedContainersBuilder_ == null) { ensureUpdatedContainersIsMutable(); updatedContainers_.remove(index); onChanged(); } else { updatedContainersBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder getUpdatedContainersBuilder( int index) { return getUpdatedContainersFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder getUpdatedContainersOrBuilder( int index) { if (updatedContainersBuilder_ == null) { return updatedContainers_.get(index); } else { return updatedContainersBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public java.util.List getUpdatedContainersOrBuilderList() { if (updatedContainersBuilder_ != null) { return updatedContainersBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(updatedContainers_); } } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder addUpdatedContainersBuilder() { return getUpdatedContainersFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder addUpdatedContainersBuilder( int index) { return 
getUpdatedContainersFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.UpdatedContainerProto updated_containers = 16; */ public java.util.List getUpdatedContainersBuilderList() { return getUpdatedContainersFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder> getUpdatedContainersFieldBuilder() { if (updatedContainersBuilder_ == null) { updatedContainersBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdatedContainerProtoOrBuilder>( updatedContainers_, ((bitField0_ & 0x00002000) != 0), getParentForChildren(), isClean()); updatedContainers_ = null; } return updatedContainersBuilder_; } private java.util.List containersFromPreviousAttempts_ = java.util.Collections.emptyList(); private void ensureContainersFromPreviousAttemptsIsMutable() { if (!((bitField0_ & 0x00004000) != 0)) { containersFromPreviousAttempts_ = new java.util.ArrayList(containersFromPreviousAttempts_); bitField0_ |= 0x00004000; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> containersFromPreviousAttemptsBuilder_; /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public java.util.List getContainersFromPreviousAttemptsList() { if 
// Generated accessors for repeated field 17 (containers_from_previous_attempts,
// YarnProtos.ContainerProto); presence bit 0x00004000. NOTE(review): generated code --
// regenerate via protoc rather than hand-editing.
(containersFromPreviousAttemptsBuilder_ == null) { return java.util.Collections.unmodifiableList(containersFromPreviousAttempts_); } else { return containersFromPreviousAttemptsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public int getContainersFromPreviousAttemptsCount() { if (containersFromPreviousAttemptsBuilder_ == null) { return containersFromPreviousAttempts_.size(); } else { return containersFromPreviousAttemptsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getContainersFromPreviousAttempts(int index) { if (containersFromPreviousAttemptsBuilder_ == null) { return containersFromPreviousAttempts_.get(index); } else { return containersFromPreviousAttemptsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public Builder setContainersFromPreviousAttempts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) { if (containersFromPreviousAttemptsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainersFromPreviousAttemptsIsMutable(); containersFromPreviousAttempts_.set(index, value); onChanged(); } else { containersFromPreviousAttemptsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public Builder setContainersFromPreviousAttempts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) { if (containersFromPreviousAttemptsBuilder_ == null) { ensureContainersFromPreviousAttemptsIsMutable(); containersFromPreviousAttempts_.set(index, builderForValue.build()); onChanged(); } else { containersFromPreviousAttemptsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerProto 
containers_from_previous_attempts = 17; */ public Builder addContainersFromPreviousAttempts(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) { if (containersFromPreviousAttemptsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainersFromPreviousAttemptsIsMutable(); containersFromPreviousAttempts_.add(value); onChanged(); } else { containersFromPreviousAttemptsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public Builder addContainersFromPreviousAttempts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto value) { if (containersFromPreviousAttemptsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainersFromPreviousAttemptsIsMutable(); containersFromPreviousAttempts_.add(index, value); onChanged(); } else { containersFromPreviousAttemptsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public Builder addContainersFromPreviousAttempts( org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) { if (containersFromPreviousAttemptsBuilder_ == null) { ensureContainersFromPreviousAttemptsIsMutable(); containersFromPreviousAttempts_.add(builderForValue.build()); onChanged(); } else { containersFromPreviousAttemptsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public Builder addContainersFromPreviousAttempts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder builderForValue) { if (containersFromPreviousAttemptsBuilder_ == null) { ensureContainersFromPreviousAttemptsIsMutable(); containersFromPreviousAttempts_.add(index, builderForValue.build()); onChanged(); } else { containersFromPreviousAttemptsBuilder_.addMessage(index, builderForValue.build()); } return 
// addAll/clear/remove plus builder accessors for field 17 follow; clear resets bit 0x00004000.
this; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public Builder addAllContainersFromPreviousAttempts( java.lang.Iterable values) { if (containersFromPreviousAttemptsBuilder_ == null) { ensureContainersFromPreviousAttemptsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, containersFromPreviousAttempts_); onChanged(); } else { containersFromPreviousAttemptsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public Builder clearContainersFromPreviousAttempts() { if (containersFromPreviousAttemptsBuilder_ == null) { containersFromPreviousAttempts_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00004000); onChanged(); } else { containersFromPreviousAttemptsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public Builder removeContainersFromPreviousAttempts(int index) { if (containersFromPreviousAttemptsBuilder_ == null) { ensureContainersFromPreviousAttemptsIsMutable(); containersFromPreviousAttempts_.remove(index); onChanged(); } else { containersFromPreviousAttemptsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder getContainersFromPreviousAttemptsBuilder( int index) { return getContainersFromPreviousAttemptsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder getContainersFromPreviousAttemptsOrBuilder( int index) { if (containersFromPreviousAttemptsBuilder_ == null) { return containersFromPreviousAttempts_.get(index); } else { return containersFromPreviousAttemptsBuilder_.getMessageOrBuilder(index); } } /** * repeated 
.hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public java.util.List getContainersFromPreviousAttemptsOrBuilderList() { if (containersFromPreviousAttemptsBuilder_ != null) { return containersFromPreviousAttemptsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(containersFromPreviousAttempts_); } } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addContainersFromPreviousAttemptsBuilder() { return getContainersFromPreviousAttemptsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder addContainersFromPreviousAttemptsBuilder( int index) { return getContainersFromPreviousAttemptsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerProto containers_from_previous_attempts = 17; */ public java.util.List getContainersFromPreviousAttemptsBuilderList() { return getContainersFromPreviousAttemptsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder> getContainersFromPreviousAttemptsFieldBuilder() { if (containersFromPreviousAttemptsBuilder_ == null) { containersFromPreviousAttemptsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder>( containersFromPreviousAttempts_, 
((bitField0_ & 0x00004000) != 0), getParentForChildren(), isClean()); containersFromPreviousAttempts_ = null; } return containersFromPreviousAttemptsBuilder_; } private java.util.List rejectedSchedulingRequests_ = java.util.Collections.emptyList(); private void ensureRejectedSchedulingRequestsIsMutable() { if (!((bitField0_ & 0x00008000) != 0)) { rejectedSchedulingRequests_ = new java.util.ArrayList(rejectedSchedulingRequests_); bitField0_ |= 0x00008000; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder> rejectedSchedulingRequestsBuilder_; /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public java.util.List getRejectedSchedulingRequestsList() { if (rejectedSchedulingRequestsBuilder_ == null) { return java.util.Collections.unmodifiableList(rejectedSchedulingRequests_); } else { return rejectedSchedulingRequestsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public int getRejectedSchedulingRequestsCount() { if (rejectedSchedulingRequestsBuilder_ == null) { return rejectedSchedulingRequests_.size(); } else { return rejectedSchedulingRequestsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto getRejectedSchedulingRequests(int index) { if (rejectedSchedulingRequestsBuilder_ == null) { return rejectedSchedulingRequests_.get(index); } else { return rejectedSchedulingRequestsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public Builder setRejectedSchedulingRequests( 
// Generated accessors for repeated field 18 (rejected_scheduling_requests,
// YarnProtos.RejectedSchedulingRequestProto); presence bit 0x00008000.
// NOTE(review): generated code -- regenerate via protoc rather than hand-editing.
int index, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto value) { if (rejectedSchedulingRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRejectedSchedulingRequestsIsMutable(); rejectedSchedulingRequests_.set(index, value); onChanged(); } else { rejectedSchedulingRequestsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public Builder setRejectedSchedulingRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder builderForValue) { if (rejectedSchedulingRequestsBuilder_ == null) { ensureRejectedSchedulingRequestsIsMutable(); rejectedSchedulingRequests_.set(index, builderForValue.build()); onChanged(); } else { rejectedSchedulingRequestsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public Builder addRejectedSchedulingRequests(org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto value) { if (rejectedSchedulingRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRejectedSchedulingRequestsIsMutable(); rejectedSchedulingRequests_.add(value); onChanged(); } else { rejectedSchedulingRequestsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public Builder addRejectedSchedulingRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto value) { if (rejectedSchedulingRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRejectedSchedulingRequestsIsMutable(); rejectedSchedulingRequests_.add(index, value); onChanged(); } else { rejectedSchedulingRequestsBuilder_.addMessage(index, value); } return this; } /** * repeated 
.hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public Builder addRejectedSchedulingRequests( org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder builderForValue) { if (rejectedSchedulingRequestsBuilder_ == null) { ensureRejectedSchedulingRequestsIsMutable(); rejectedSchedulingRequests_.add(builderForValue.build()); onChanged(); } else { rejectedSchedulingRequestsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public Builder addRejectedSchedulingRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder builderForValue) { if (rejectedSchedulingRequestsBuilder_ == null) { ensureRejectedSchedulingRequestsIsMutable(); rejectedSchedulingRequests_.add(index, builderForValue.build()); onChanged(); } else { rejectedSchedulingRequestsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public Builder addAllRejectedSchedulingRequests( java.lang.Iterable values) { if (rejectedSchedulingRequestsBuilder_ == null) { ensureRejectedSchedulingRequestsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, rejectedSchedulingRequests_); onChanged(); } else { rejectedSchedulingRequestsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public Builder clearRejectedSchedulingRequests() { if (rejectedSchedulingRequestsBuilder_ == null) { rejectedSchedulingRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00008000); onChanged(); } else { rejectedSchedulingRequestsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public Builder 
// remove/get/getOrBuilder/addBuilder accessors for field 18, then the lazy field-builder
// factory, unknown-fields overrides, and the end of the AllocateResponseProto builder.
removeRejectedSchedulingRequests(int index) { if (rejectedSchedulingRequestsBuilder_ == null) { ensureRejectedSchedulingRequestsIsMutable(); rejectedSchedulingRequests_.remove(index); onChanged(); } else { rejectedSchedulingRequestsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder getRejectedSchedulingRequestsBuilder( int index) { return getRejectedSchedulingRequestsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder getRejectedSchedulingRequestsOrBuilder( int index) { if (rejectedSchedulingRequestsBuilder_ == null) { return rejectedSchedulingRequests_.get(index); } else { return rejectedSchedulingRequestsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public java.util.List getRejectedSchedulingRequestsOrBuilderList() { if (rejectedSchedulingRequestsBuilder_ != null) { return rejectedSchedulingRequestsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(rejectedSchedulingRequests_); } } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder addRejectedSchedulingRequestsBuilder() { return getRejectedSchedulingRequestsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder addRejectedSchedulingRequestsBuilder( int index) { return 
getRejectedSchedulingRequestsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.RejectedSchedulingRequestProto rejected_scheduling_requests = 18; */ public java.util.List getRejectedSchedulingRequestsBuilderList() { return getRejectedSchedulingRequestsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder> getRejectedSchedulingRequestsFieldBuilder() { if (rejectedSchedulingRequestsBuilder_ == null) { rejectedSchedulingRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder>( rejectedSchedulingRequests_, ((bitField0_ & 0x00008000) != 0), getParentForChildren(), isClean()); rejectedSchedulingRequests_ = null; } return rejectedSchedulingRequestsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.AllocateResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.AllocateResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new 
// Singleton default instance and deprecated PARSER for AllocateResponseProto, then the start of
// GetNewApplicationRequestProto -- an empty request message (no declared fields; only
// unknown-field passthrough). NOTE(review): generated code; regenerate via protoc.
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public AllocateResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new AllocateResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetNewApplicationRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetNewApplicationRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.GetNewApplicationRequestProto} */ public static final class GetNewApplicationRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetNewApplicationRequestProto) GetNewApplicationRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetNewApplicationRequestProto.newBuilder() to construct. 
// Parsing constructor: reads tags until end-of-stream (tag 0), routing everything into
// the unknown-field set since this message declares no fields.
private GetNewApplicationRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetNewApplicationRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetNewApplicationRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto.class, 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseDelimitedFrom(java.io.InputStream input) throws 
java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetNewApplicationRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetNewApplicationRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override 
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetNewApplicationRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetNewApplicationRequestProto) private static final 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetNewApplicationRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetNewApplicationRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetNewApplicationResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetNewApplicationResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ boolean hasApplicationId(); /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId(); /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder(); /** * optional .hadoop.yarn.ResourceProto maximumCapability = 2; */ boolean hasMaximumCapability(); /** * optional 
.hadoop.yarn.ResourceProto maximumCapability = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMaximumCapability(); /** * optional .hadoop.yarn.ResourceProto maximumCapability = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMaximumCapabilityOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.GetNewApplicationResponseProto} */ public static final class GetNewApplicationResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetNewApplicationResponseProto) GetNewApplicationResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetNewApplicationResponseProto.newBuilder() to construct. private GetNewApplicationResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetNewApplicationResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetNewApplicationResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = applicationId_.toBuilder(); } applicationId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.PARSER, 
extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(applicationId_); applicationId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder subBuilder = null; if (((bitField0_ & 0x00000002) != 0)) { subBuilder = maximumCapability_.toBuilder(); } maximumCapability_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(maximumCapability_); maximumCapability_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto.Builder.class); } private int bitField0_; public static final int APPLICATION_ID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; /** * optional 
.hadoop.yarn.ApplicationIdProto application_id = 1; */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } public static final int MAXIMUMCAPABILITY_FIELD_NUMBER = 2; private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto maximumCapability_; /** * optional .hadoop.yarn.ResourceProto maximumCapability = 2; */ public boolean hasMaximumCapability() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.ResourceProto maximumCapability = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMaximumCapability() { return maximumCapability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maximumCapability_; } /** * optional .hadoop.yarn.ResourceProto maximumCapability = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMaximumCapabilityOrBuilder() { return maximumCapability_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maximumCapability_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasMaximumCapability()) { if (!getMaximumCapability().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApplicationId()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getMaximumCapability()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getApplicationId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getMaximumCapability()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto) obj; if (hasApplicationId() != other.hasApplicationId()) return false; if (hasApplicationId()) { if (!getApplicationId() .equals(other.getApplicationId())) return false; } if (hasMaximumCapability() != other.hasMaximumCapability()) return false; if 
(hasMaximumCapability()) { if (!getMaximumCapability() .equals(other.getMaximumCapability())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationId()) { hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER; hash = (53 * hash) + getApplicationId().hashCode(); } if (hasMaximumCapability()) { hash = (37 * hash) + MAXIMUMCAPABILITY_FIELD_NUMBER; hash = (53 * hash) + getMaximumCapability().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws 
java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetNewApplicationResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetNewApplicationResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationIdFieldBuilder(); getMaximumCapabilityFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationIdBuilder_ == null) { applicationId_ = null; } else { applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (maximumCapabilityBuilder_ == null) { maximumCapability_ = null; } else { maximumCapabilityBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewApplicationResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto result = buildPartial(); if (!result.isInitialized()) { throw 
newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (applicationIdBuilder_ == null) { result.applicationId_ = applicationId_; } else { result.applicationId_ = applicationIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { if (maximumCapabilityBuilder_ == null) { result.maximumCapability_ = maximumCapability_; } else { result.maximumCapability_ = maximumCapabilityBuilder_.build(); } to_bitField0_ |= 0x00000002; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto.getDefaultInstance()) return this; if (other.hasApplicationId()) { mergeApplicationId(other.getApplicationId()); } if (other.hasMaximumCapability()) { mergeMaximumCapability(other.getMaximumCapability()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasMaximumCapability()) { if (!getMaximumCapability().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_; /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { if (applicationIdBuilder_ == null) { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } else { return applicationIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationId_ = value; onChanged(); } else { applicationIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder setApplicationId( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) { if (applicationIdBuilder_ == null) { applicationId_ = builderForValue.build(); onChanged(); } else { applicationIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationId_ != null && applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) { applicationId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.newBuilder(applicationId_).mergeFrom(value).buildPartial(); } else { applicationId_ = value; } onChanged(); } else { 
applicationIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder clearApplicationId() { if (applicationIdBuilder_ == null) { applicationId_ = null; onChanged(); } else { applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { if (applicationIdBuilder_ != null) { return applicationIdBuilder_.getMessageOrBuilder(); } else { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> getApplicationIdFieldBuilder() { if (applicationIdBuilder_ == null) { applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>( getApplicationId(), getParentForChildren(), isClean()); applicationId_ = null; } return applicationIdBuilder_; } private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto maximumCapability_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> maximumCapabilityBuilder_; /** * optional .hadoop.yarn.ResourceProto maximumCapability = 2; */ public boolean hasMaximumCapability() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.ResourceProto maximumCapability = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getMaximumCapability() { if (maximumCapabilityBuilder_ == null) { return maximumCapability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maximumCapability_; } else { return maximumCapabilityBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceProto maximumCapability = 2; */ public Builder setMaximumCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (maximumCapabilityBuilder_ == null) { if (value == null) { throw new NullPointerException(); } maximumCapability_ = value; onChanged(); } else { maximumCapabilityBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.yarn.ResourceProto maximumCapability = 2; */ public Builder setMaximumCapability( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) { if (maximumCapabilityBuilder_ == null) { maximumCapability_ = builderForValue.build(); onChanged(); } else { maximumCapabilityBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.yarn.ResourceProto maximumCapability = 2; */ public Builder mergeMaximumCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (maximumCapabilityBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && maximumCapability_ != null && maximumCapability_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) { maximumCapability_ = 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.newBuilder(maximumCapability_).mergeFrom(value).buildPartial(); } else { maximumCapability_ = value; } onChanged(); } else { maximumCapabilityBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.yarn.ResourceProto maximumCapability = 2; */ public Builder clearMaximumCapability() { if (maximumCapabilityBuilder_ == null) { maximumCapability_ = null; onChanged(); } else { maximumCapabilityBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * optional .hadoop.yarn.ResourceProto maximumCapability = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getMaximumCapabilityBuilder() { bitField0_ |= 0x00000002; onChanged(); return getMaximumCapabilityFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceProto maximumCapability = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getMaximumCapabilityOrBuilder() { if (maximumCapabilityBuilder_ != null) { return maximumCapabilityBuilder_.getMessageOrBuilder(); } else { return maximumCapability_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : maximumCapability_; } } /** * optional .hadoop.yarn.ResourceProto maximumCapability = 2; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getMaximumCapabilityFieldBuilder() { if (maximumCapabilityBuilder_ == null) { maximumCapabilityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>( getMaximumCapability(), getParentForChildren(), isClean()); maximumCapability_ = null; } return maximumCapabilityBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetNewApplicationResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetNewApplicationResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public 
GetNewApplicationResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetNewApplicationResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetApplicationReportRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationReportRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ boolean hasApplicationId(); /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId(); /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.GetApplicationReportRequestProto} */ public static final class GetApplicationReportRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationReportRequestProto) GetApplicationReportRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetApplicationReportRequestProto.newBuilder() to construct. 
private GetApplicationReportRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetApplicationReportRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetApplicationReportRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = applicationId_.toBuilder(); } applicationId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(applicationId_); applicationId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto.Builder.class); } private int bitField0_; public static final int APPLICATION_ID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { return applicationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApplicationId()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getApplicationId()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto) obj; if (hasApplicationId() != other.hasApplicationId()) return false; if (hasApplicationId()) { if (!getApplicationId() .equals(other.getApplicationId())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationId()) { hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER; hash = (53 * hash) + getApplicationId().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } 
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetApplicationReportRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationReportRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationIdBuilder_ == null) { applicationId_ = null; } else { applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (applicationIdBuilder_ == null) { result.applicationId_ = applicationId_; } else { result.applicationId_ = applicationIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object 
value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto.getDefaultInstance()) return this; if (other.hasApplicationId()) { mergeApplicationId(other.getApplicationId()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } 
catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_; /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { if (applicationIdBuilder_ == null) { return applicationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } else { return applicationIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationId_ = value; onChanged(); } else { applicationIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder setApplicationId( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) { if (applicationIdBuilder_ == null) { applicationId_ = builderForValue.build(); onChanged(); } else { applicationIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationId_ != null && applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) { applicationId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.newBuilder(applicationId_).mergeFrom(value).buildPartial(); } else { applicationId_ = value; } onChanged(); } else { applicationIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder clearApplicationId() { if (applicationIdBuilder_ == null) { applicationId_ = null; onChanged(); } else { applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { if (applicationIdBuilder_ != null) { return applicationIdBuilder_.getMessageOrBuilder(); } else { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> getApplicationIdFieldBuilder() { if (applicationIdBuilder_ == null) { applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>( getApplicationId(), getParentForChildren(), isClean()); applicationId_ = null; } return applicationIdBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationReportRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationReportRequestProto) private static final 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetApplicationReportRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetApplicationReportRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetApplicationReportResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationReportResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ApplicationReportProto application_report = 1; */ boolean hasApplicationReport(); /** * optional .hadoop.yarn.ApplicationReportProto application_report = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplicationReport(); /** * optional .hadoop.yarn.ApplicationReportProto application_report = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder getApplicationReportOrBuilder(); } /** * Protobuf type {@code 
hadoop.yarn.GetApplicationReportResponseProto}
 */
public static final class GetApplicationReportResponseProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationReportResponseProto)
    GetApplicationReportResponseProtoOrBuilder {
  // NOTE(review): protoc-generated message class for the response of
  // ApplicationClientProtocol#getApplicationReport. It carries a single
  // optional submessage field: application_report = 1. Do not hand-edit;
  // regenerate from yarn_service_protos.proto instead.
  private static final long serialVersionUID = 0L;
  // Use GetApplicationReportResponseProto.newBuilder() to construct.
  private GetApplicationReportResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) {
    super(builder);
  }
  private GetApplicationReportResponseProto() {
  }

  @java.lang.Override
  public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor. Reads tag/value pairs until EOF (tag 0);
  // tag 10 (field 1, wire type 2) is the application_report submessage, which
  // is merged into any previously-seen value for the same field. Unrecognized
  // fields are preserved in unknownFields rather than dropped.
  private GetApplicationReportResponseProto(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
        org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: {
            org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder subBuilder = null;
            if (((bitField0_ & 0x00000001) != 0)) {
              // Field already seen on the wire: merge into the existing value.
              subBuilder = applicationReport_.toBuilder();
            }
            applicationReport_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.PARSER, extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(applicationReport_);
              applicationReport_ = subBuilder.buildPartial();
            }
            bitField0_ |= 0x00000001;
            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      // Attach the partially-parsed message so callers can inspect it.
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportResponseProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportResponseProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto.Builder.class);
  }

  // Bit 0x00000001 of bitField0_ tracks presence of application_report.
  private int bitField0_;
  public static final int APPLICATION_REPORT_FIELD_NUMBER = 1;
  private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto applicationReport_;
  /**
   * optional .hadoop.yarn.ApplicationReportProto application_report = 1;
   */
  public boolean hasApplicationReport() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * optional .hadoop.yarn.ApplicationReportProto application_report = 1;
   */
  public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplicationReport() {
    // Never returns null: falls back to the field type's default instance.
    return applicationReport_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance() : applicationReport_;
  }
  /**
   * optional .hadoop.yarn.ApplicationReportProto application_report = 1;
   */
  public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder getApplicationReportOrBuilder() {
    return applicationReport_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance() : applicationReport_;
  }

  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    if (hasApplicationReport()) {
      // Recursively validate the nested report when present.
      if (!getApplicationReport().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
    }
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getApplicationReport());
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeMessageSize(1, getApplicationReport());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto)) {
      return super.equals(obj);
    }
    org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto) obj;
    // Equal iff field presence, field value, and unknown fields all match.
    if (hasApplicationReport() != other.hasApplicationReport()) return false;
    if (hasApplicationReport()) {
      if (!getApplicationReport()
          .equals(other.getApplicationReport())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard protoc prime-mixing scheme, seeded from the descriptor hash.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasApplicationReport()) {
      hash = (37 * hash) + APPLICATION_REPORT_FIELD_NUMBER;
      hash = (53 * hash) + getApplicationReport().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // --- Static parseFrom overloads: all delegate to PARSER or the
  // GeneratedMessageV3 IOException-wrapping helpers. ---
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(
      java.nio.ByteBuffer data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(
      java.nio.ByteBuffer data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(byte[] data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(
      byte[] data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetApplicationReportResponseProto}
   */
  public static final class Builder extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
      // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationReportResponseProto)
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProtoOrBuilder {
    // Mutable builder for the enclosing immutable message; mirrors the
    // message's single optional application_report field, either inline
    // (applicationReport_) or via a SingleFieldBuilderV3 once a nested
    // builder has been requested.
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto.Builder.class);
    }

    // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
        getApplicationReportFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      if (applicationReportBuilder_ == null) {
        applicationReport_ = null;
      } else {
        applicationReportBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationReportResponseProto_descriptor;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto getDefaultInstanceForType() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto.getDefaultInstance();
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto build() {
      // Unlike buildPartial(), throws if required fields are missing.
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto buildPartial() {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        if (applicationReportBuilder_ == null) {
          result.applicationReport_ = applicationReport_;
        } else {
          result.applicationReport_ = applicationReportBuilder_.build();
        }
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto) {
        return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto other) {
      // Merging the default instance is a no-op.
      if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto.getDefaultInstance()) return this;
      if (other.hasApplicationReport()) {
        mergeApplicationReport(other.getApplicationReport());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      if (hasApplicationReport()) {
        if (!getApplicationReport().isInitialized()) {
          return false;
        }
      }
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever parsed successfully, then surface the wrapped IO error.
        parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto applicationReport_;
    private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder> applicationReportBuilder_;
    /**
     * optional .hadoop.yarn.ApplicationReportProto application_report = 1;
     */
    public boolean hasApplicationReport() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional .hadoop.yarn.ApplicationReportProto application_report = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplicationReport() {
      if (applicationReportBuilder_ == null) {
        return applicationReport_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance() : applicationReport_;
      } else {
        return applicationReportBuilder_.getMessage();
      }
    }
    /**
     * optional .hadoop.yarn.ApplicationReportProto application_report = 1;
     */
    public Builder setApplicationReport(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto value) {
      if (applicationReportBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        applicationReport_ = value;
        onChanged();
      } else {
        applicationReportBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * optional .hadoop.yarn.ApplicationReportProto application_report = 1;
     */
    public Builder setApplicationReport(
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder builderForValue) {
      if (applicationReportBuilder_ == null) {
        applicationReport_ = builderForValue.build();
        onChanged();
      } else {
        applicationReportBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * optional .hadoop.yarn.ApplicationReportProto application_report = 1;
     */
    public Builder mergeApplicationReport(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto value) {
      if (applicationReportBuilder_ == null) {
        // Merge into the existing value only when one is actually present;
        // otherwise adopt the incoming message wholesale.
        if (((bitField0_ & 0x00000001) != 0) &&
            applicationReport_ != null &&
            applicationReport_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance()) {
          applicationReport_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.newBuilder(applicationReport_).mergeFrom(value).buildPartial();
        } else {
          applicationReport_ = value;
        }
        onChanged();
      } else {
        applicationReportBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * optional .hadoop.yarn.ApplicationReportProto application_report = 1;
     */
    public Builder clearApplicationReport() {
      if (applicationReportBuilder_ == null) {
        applicationReport_ = null;
        onChanged();
      } else {
        applicationReportBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    /**
     * optional .hadoop.yarn.ApplicationReportProto application_report = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder getApplicationReportBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getApplicationReportFieldBuilder().getBuilder();
    }
    /**
     * optional .hadoop.yarn.ApplicationReportProto application_report = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder getApplicationReportOrBuilder() {
      if (applicationReportBuilder_ != null) {
        return applicationReportBuilder_.getMessageOrBuilder();
      } else {
        return applicationReport_ == null ?
            org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance() : applicationReport_;
      }
    }
    /**
     * optional .hadoop.yarn.ApplicationReportProto application_report = 1;
     */
    private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder>
        getApplicationReportFieldBuilder() {
      // Lazily switch from the inline field to builder-backed storage.
      if (applicationReportBuilder_ == null) {
        applicationReportBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
            org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder>(
                getApplicationReport(),
                getParentForChildren(),
                isClean());
        applicationReport_ = null;
      }
      return applicationReportBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationReportResponseProto)
  }

  // @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationReportResponseProto)
  private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto();
  }

  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
      PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
    @java.lang.Override
    public GetApplicationReportResponseProto parsePartialFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return new GetApplicationReportResponseProto(input, extensionRegistry);
    }
  };

  public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}

public interface SubmitApplicationRequestProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.SubmitApplicationRequestProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /**
   * optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1;
   */
  boolean hasApplicationSubmissionContext();
  /**
   * optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto
getApplicationSubmissionContext(); /** * optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder getApplicationSubmissionContextOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.SubmitApplicationRequestProto} */ public static final class SubmitApplicationRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.SubmitApplicationRequestProto) SubmitApplicationRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use SubmitApplicationRequestProto.newBuilder() to construct. private SubmitApplicationRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private SubmitApplicationRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private SubmitApplicationRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = applicationSubmissionContext_.toBuilder(); } applicationSubmissionContext_ = 
input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(applicationSubmissionContext_); applicationSubmissionContext_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto.Builder.class); } private int bitField0_; public static final int APPLICATION_SUBMISSION_CONTEXT_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto applicationSubmissionContext_; /** * optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1; */ public boolean hasApplicationSubmissionContext() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1; */ public 
// ---------------------------------------------------------------------------
// GENERATED CODE (protoc, Hadoop-shaded protobuf runtime). Do not hand-edit;
// regenerate from yarn_service_protos.proto if the message shape must change.
// ---------------------------------------------------------------------------
// SubmitApplicationRequestProto, continued: accessors for the single optional
// field `application_submission_context` (field number 1, presence tracked by
// bit 0x1 of bitField0_), the memoized isInitialized check (delegates to the
// nested message's own initialization check), wire serialization
// (writeTo / getSerializedSize), and value-based equals(). The getters return
// the field's default instance when the field is unset (applicationSubmissionContext_ == null).
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto getApplicationSubmissionContext() { return applicationSubmissionContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance() : applicationSubmissionContext_; } /** * optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder getApplicationSubmissionContextOrBuilder() { return applicationSubmissionContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance() : applicationSubmissionContext_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasApplicationSubmissionContext()) { if (!getApplicationSubmissionContext().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApplicationSubmissionContext()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getApplicationSubmissionContext()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto)) { return super.equals(obj); } 
// equals(): both messages must agree on field presence, field value, and
// unknown fields. hashCode() is memoized and mixes the field number with the
// field value hash using the standard generated 37/53 scheme.
org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto) obj; if (hasApplicationSubmissionContext() != other.hasApplicationSubmissionContext()) return false; if (hasApplicationSubmissionContext()) { if (!getApplicationSubmissionContext() .equals(other.getApplicationSubmissionContext())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationSubmissionContext()) { hash = (37 * hash) + APPLICATION_SUBMISSION_CONTEXT_FIELD_NUMBER; hash = (53 * hash) + getApplicationSubmissionContext().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) 
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
// ---------------------------------------------------------------------------
// GENERATED CODE (protoc). SubmitApplicationRequestProto.Builder: standard
// generated builder managing the single optional message field
// `application_submission_context` via a lazily-created SingleFieldBuilderV3
// (applicationSubmissionContextBuilder_). Field presence is tracked by bit
// 0x1 of bitField0_. Also contains the enclosing class's DEFAULT_INSTANCE
// static initializer at the end of this span.
// ---------------------------------------------------------------------------
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.SubmitApplicationRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.SubmitApplicationRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationSubmissionContextFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationSubmissionContextBuilder_ == null) { applicationSubmissionContext_ = null; } else { 
// clear() resets the field (or its sub-builder) and drops the presence bit.
// buildPartial() copies the field into the result only when bit 0x1 is set,
// preferring the sub-builder's built message when one exists.
applicationSubmissionContextBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (applicationSubmissionContextBuilder_ == null) { result.applicationSubmissionContext_ = applicationSubmissionContext_; } else { result.applicationSubmissionContext_ = applicationSubmissionContextBuilder_.build(); } to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return 
super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto.getDefaultInstance()) return this; if (other.hasApplicationSubmissionContext()) { mergeApplicationSubmissionContext(other.getApplicationSubmissionContext()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasApplicationSubmissionContext()) { if (!getApplicationSubmissionContext().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto applicationSubmissionContext_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder> applicationSubmissionContextBuilder_; /** * optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1; */ public boolean hasApplicationSubmissionContext() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto getApplicationSubmissionContext() { if (applicationSubmissionContextBuilder_ == null) { return applicationSubmissionContext_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance() : applicationSubmissionContext_; } else { return applicationSubmissionContextBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1; */ public Builder setApplicationSubmissionContext(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto value) { if (applicationSubmissionContextBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationSubmissionContext_ = value; onChanged(); } else { applicationSubmissionContextBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1; */ public Builder setApplicationSubmissionContext( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder builderForValue) { if (applicationSubmissionContextBuilder_ == null) { applicationSubmissionContext_ = builderForValue.build(); onChanged(); } else { applicationSubmissionContextBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1; */ public Builder mergeApplicationSubmissionContext(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto value) { if (applicationSubmissionContextBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationSubmissionContext_ != null && applicationSubmissionContext_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance()) { applicationSubmissionContext_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.newBuilder(applicationSubmissionContext_).mergeFrom(value).buildPartial(); } else { applicationSubmissionContext_ = value; } onChanged(); } else { 
applicationSubmissionContextBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1; */ public Builder clearApplicationSubmissionContext() { if (applicationSubmissionContextBuilder_ == null) { applicationSubmissionContext_ = null; onChanged(); } else { applicationSubmissionContextBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder getApplicationSubmissionContextBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationSubmissionContextFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder getApplicationSubmissionContextOrBuilder() { if (applicationSubmissionContextBuilder_ != null) { return applicationSubmissionContextBuilder_.getMessageOrBuilder(); } else { return applicationSubmissionContext_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance() : applicationSubmissionContext_; } } /** * optional .hadoop.yarn.ApplicationSubmissionContextProto application_submission_context = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder> getApplicationSubmissionContextFieldBuilder() { if (applicationSubmissionContextBuilder_ == null) { applicationSubmissionContextBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder>( getApplicationSubmissionContext(), getParentForChildren(), isClean()); applicationSubmissionContext_ = null; } return applicationSubmissionContextBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.SubmitApplicationRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.SubmitApplicationRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto(); } public static 
// ---------------------------------------------------------------------------
// GENERATED CODE (protoc). Tail of SubmitApplicationRequestProto (its PARSER
// singleton and default-instance accessors), followed by
// SubmitApplicationResponseProto: a message with NO declared fields — only
// unknown fields are retained through parsing and re-serialized by writeTo.
// The parsing constructor's switch handles tag 0 (end of stream) and routes
// everything else to parseUnknownField.
// ---------------------------------------------------------------------------
org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public SubmitApplicationRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new SubmitApplicationRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface SubmitApplicationResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.SubmitApplicationResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.SubmitApplicationResponseProto} */ public static final class SubmitApplicationResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.SubmitApplicationResponseProto) SubmitApplicationResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use SubmitApplicationResponseProto.newBuilder() to construct. 
private SubmitApplicationResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private SubmitApplicationResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private SubmitApplicationResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( 
// With no fields, isInitialized is trivially true, serialization emits only
// unknown fields, and equals/hashCode depend only on descriptor + unknowns.
org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
// ---------------------------------------------------------------------------
// GENERATED CODE (protoc). SubmitApplicationResponseProto.Builder: with no
// declared fields the builder only delegates to GeneratedMessageV3.Builder
// and merges unknown fields. Followed by the message's PARSER singleton and
// DEFAULT_INSTANCE, and by FailApplicationAttemptRequestProtoOrBuilder — the
// read-only accessor interface for the optional application_attempt_id field
// (field number 1) of the next message.
// ---------------------------------------------------------------------------
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.SubmitApplicationResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.SubmitApplicationResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SubmitApplicationResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } 
@java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.SubmitApplicationResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.SubmitApplicationResponseProto) 
private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public SubmitApplicationResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new SubmitApplicationResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface FailApplicationAttemptRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.FailApplicationAttemptRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ boolean hasApplicationAttemptId(); /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId(); /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder(); } /** * Protobuf type 
// --- Generated message FailApplicationAttemptRequestProto: wraps one optional sub-message,
// `application_attempt_id = 1` (ApplicationAttemptIdProto), carried on wire tag 10 (field 1, type 2).
// DO NOT hand-edit: regenerated from yarn_service_protos.proto.
// Line below: class declaration, serial ctor, and the CodedInputStream parsing ctor's tag loop —
// tag 0 ends the stream; tag 10 reads/merges the ApplicationAttemptIdProto (merging with any
// previously-seen value via toBuilder, per proto2 last-wins-merge semantics for messages) and sets
// presence bit 0x1 in bitField0_; any other tag is preserved through parseUnknownField.
{@code hadoop.yarn.FailApplicationAttemptRequestProto} */ public static final class FailApplicationAttemptRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.FailApplicationAttemptRequestProto) FailApplicationAttemptRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use FailApplicationAttemptRequestProto.newBuilder() to construct. private FailApplicationAttemptRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private FailApplicationAttemptRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FailApplicationAttemptRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = applicationAttemptId_.toBuilder(); } applicationAttemptId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(applicationAttemptId_); applicationAttemptId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done =
// Line below: end of the parse loop — parse errors keep the partial message
// (setUnfinishedMessage) before rethrowing; finally always freezes unknownFields. Then descriptor
// and field-accessor-table hookups, the presence bitfield, and the message-side
// has/get accessors (null-safe: default instance returned when the field was never set).
true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto.Builder.class); } private int bitField0_; public static final int APPLICATION_ATTEMPT_ID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_; /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public boolean hasApplicationAttemptId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() { return applicationAttemptId_ == null ?
// Line below: getApplicationAttemptIdOrBuilder; memoized isInitialized (always true — no required
// fields); writeTo serializes field 1 only when its presence bit is set, then unknown fields;
// getSerializedSize memoized the same way; equals compares presence, field value, and unknownFields.
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() { return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApplicationAttemptId()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getApplicationAttemptId()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto) obj; if (hasApplicationAttemptId() != other.hasApplicationAttemptId()) return false; if (hasApplicationAttemptId()) { if (!getApplicationAttemptId() .equals(other.getApplicationAttemptId())) return false; } if
// Line below: memoized hashCode mixing the descriptor, field number, field value, and unknownFields
// (the standard generated 41/19/37/53/29 multipliers — consistent with equals); then the first batch
// of static parseFrom overloads (ByteBuffer / ByteString / byte[], with and without a registry).
(!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationAttemptId()) { hash = (37 * hash) + APPLICATION_ATTEMPT_ID_FIELD_NUMBER; hash = (53 * hash) + getApplicationAttemptId().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static
// Line below: remaining stream-based parseFrom / parseDelimitedFrom overloads, all delegating to
// GeneratedMessageV3 helpers that translate protobuf exceptions into IOException.
org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static
// Line below: final parseFrom overload; newBuilder / toBuilder factory methods (toBuilder on the
// default instance returns a fresh Builder instead of merging, avoiding a pointless copy); then the
// nested Builder class header with its descriptor/accessor-table hookups.
org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.FailApplicationAttemptRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.FailApplicationAttemptRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized(
// Line below: Builder constructors (eagerly create the field builder only when
// alwaysUseFieldBuilders is on); clear() resets the field/bit; descriptor accessors; build()
// (throws if uninitialized) and the start of buildPartial().
org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationAttemptIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationAttemptIdBuilder_ == null) { applicationAttemptId_ = null; } else { applicationAttemptIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto result = new
// Line below: rest of buildPartial() — copies application_attempt_id (from the raw field or the
// nested builder) and translates the builder's presence bit into the message's bitField0_ — plus
// the standard super-delegating Builder overrides and Builder.mergeFrom(Message) dispatch.
org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (applicationAttemptIdBuilder_ == null) { result.applicationAttemptId_ = applicationAttemptId_; } else { result.applicationAttemptId_ = applicationAttemptIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto other) { if (other ==
// Line below: typed mergeFrom (merges the sub-message when present plus unknown fields);
// mergeFrom(CodedInputStream) with keep-partial-on-error semantics; then the Builder's field state —
// the raw applicationAttemptId_ and the lazily-created SingleFieldBuilderV3 (only one of the two is
// authoritative at a time) — and the Builder-side has/get accessors.
org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto.getDefaultInstance()) return this; if (other.hasApplicationAttemptId()) { mergeApplicationAttemptId(other.getApplicationAttemptId()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> applicationAttemptIdBuilder_; /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public boolean hasApplicationAttemptId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() { if (applicationAttemptIdBuilder_ == null) { return applicationAttemptId_ == null ?
// Line below: set/merge/clear for application_attempt_id. Each mutator writes either the raw field
// (builder absent) or delegates to the field builder; merge follows proto2 semantics — if a
// non-default value is already present, newBuilder(existing).mergeFrom(value) recursively merges,
// otherwise the new value simply replaces it. All set presence bit 0x1.
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_; } else { return applicationAttemptIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public Builder setApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) { if (applicationAttemptIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationAttemptId_ = value; onChanged(); } else { applicationAttemptIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public Builder setApplicationAttemptId( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) { if (applicationAttemptIdBuilder_ == null) { applicationAttemptId_ = builderForValue.build(); onChanged(); } else { applicationAttemptIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public Builder mergeApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) { if (applicationAttemptIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationAttemptId_ != null && applicationAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) { applicationAttemptId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.newBuilder(applicationAttemptId_).mergeFrom(value).buildPartial(); } else { applicationAttemptId_ = value; } onChanged(); } else { applicationAttemptIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public Builder clearApplicationAttemptId() { if (applicationAttemptIdBuilder_ == null) {
// Line below: tail of clearApplicationAttemptId; getApplicationAttemptIdBuilder() (marks the field
// present and hands out the nested builder); getApplicationAttemptIdOrBuilder(); and the lazy
// getApplicationAttemptIdFieldBuilder() which, once created, takes ownership of the raw field
// (applicationAttemptId_ is nulled so the builder becomes the single source of truth).
applicationAttemptId_ = null; onChanged(); } else { applicationAttemptIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getApplicationAttemptIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationAttemptIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() { if (applicationAttemptIdBuilder_ != null) { return applicationAttemptIdBuilder_.getMessageOrBuilder(); } else { return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_; } } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> getApplicationAttemptIdFieldBuilder() { if (applicationAttemptIdBuilder_ == null) { applicationAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>( getApplicationAttemptId(), getParentForChildren(), isClean()); applicationAttemptId_ = null; } return applicationAttemptIdBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return
// Line below: Builder's unknown-field delegates and closing brace; the message's static
// DEFAULT_INSTANCE / deprecated raw PARSER / accessors; closing brace of
// FailApplicationAttemptRequestProto; then the empty OrBuilder interface for the next message,
// FailApplicationAttemptResponseProto (an empty response message — no fields).
super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.FailApplicationAttemptRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.FailApplicationAttemptRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public FailApplicationAttemptRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new FailApplicationAttemptRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface FailApplicationAttemptResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.FailApplicationAttemptResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.FailApplicationAttemptResponseProto} */
// --- Generated message FailApplicationAttemptResponseProto: an EMPTY message (no declared fields);
// only unknown fields are parsed/serialized. Definition continues past this chunk (Builder is cut
// off below). DO NOT hand-edit: regenerated from yarn_service_protos.proto.
// Line below: class declaration, ctors, and the CodedInputStream parsing ctor — the tag loop has no
// field cases, so everything except end-of-stream is preserved via parseUnknownField; parse errors
// keep the partial message, and finally freezes unknownFields.
public static final class FailApplicationAttemptResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.FailApplicationAttemptResponseProto) FailApplicationAttemptResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use FailApplicationAttemptResponseProto.newBuilder() to construct. private FailApplicationAttemptResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private FailApplicationAttemptResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FailApplicationAttemptResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return
// Line below: descriptor / field-accessor-table hookups; isInitialized always true (no required
// fields); writeTo / getSerializedSize / equals / hashCode operate on unknownFields only.
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash;
// Line below: static parseFrom overloads for ByteBuffer / ByteString / byte[] (with and without an
// ExtensionRegistryLite), all delegating to PARSER.
} public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return
// Line below: stream-based parseFrom / parseDelimitedFrom overloads via GeneratedMessageV3 helpers
// (translate protobuf exceptions into IOException); newBuilderForType / newBuilder start here.
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public
// Line below: remaining builder factories (toBuilder on DEFAULT_INSTANCE returns a fresh Builder)
// and the nested Builder class header; the Builder body continues beyond this chunk.
static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.FailApplicationAttemptResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.FailApplicationAttemptResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return 
super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.FailApplicationAttemptResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.FailApplicationAttemptResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public FailApplicationAttemptResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new FailApplicationAttemptResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface KillApplicationRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.KillApplicationRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ boolean 
hasApplicationId(); /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId(); /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder(); /** * optional string diagnostics = 2; */ boolean hasDiagnostics(); /** * optional string diagnostics = 2; */ java.lang.String getDiagnostics(); /** * optional string diagnostics = 2; */ org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsBytes(); } /** * Protobuf type {@code hadoop.yarn.KillApplicationRequestProto} */ public static final class KillApplicationRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.KillApplicationRequestProto) KillApplicationRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use KillApplicationRequestProto.newBuilder() to construct. 
private KillApplicationRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private KillApplicationRequestProto() { diagnostics_ = ""; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private KillApplicationRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = applicationId_.toBuilder(); } applicationId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(applicationId_); applicationId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; diagnostics_ = bs; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); 
makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto.Builder.class); } private int bitField0_; public static final int APPLICATION_ID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { return applicationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } public static final int DIAGNOSTICS_FIELD_NUMBER = 2; private volatile java.lang.Object diagnostics_; /** * optional string diagnostics = 2; */ public boolean hasDiagnostics() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string diagnostics = 2; */ public java.lang.String getDiagnostics() { java.lang.Object ref = diagnostics_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { diagnostics_ = s; } return s; } } /** * optional string diagnostics = 2; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsBytes() { java.lang.Object ref = diagnostics_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); diagnostics_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApplicationId()); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, diagnostics_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += 
org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getApplicationId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, diagnostics_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto) obj; if (hasApplicationId() != other.hasApplicationId()) return false; if (hasApplicationId()) { if (!getApplicationId() .equals(other.getApplicationId())) return false; } if (hasDiagnostics() != other.hasDiagnostics()) return false; if (hasDiagnostics()) { if (!getDiagnostics() .equals(other.getDiagnostics())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationId()) { hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER; hash = (53 * hash) + getApplicationId().hashCode(); } if (hasDiagnostics()) { hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER; hash = (53 * hash) + getDiagnostics().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom( java.nio.ByteBuffer data, 
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); 
} public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.KillApplicationRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.KillApplicationRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationIdBuilder_ == null) { applicationId_ = null; } else { applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); 
diagnostics_ = ""; bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (applicationIdBuilder_ == null) { result.applicationId_ = applicationId_; } else { result.applicationId_ = applicationIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { to_bitField0_ |= 0x00000002; } result.diagnostics_ = diagnostics_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return 
super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto.getDefaultInstance()) return this; if (other.hasApplicationId()) { mergeApplicationId(other.getApplicationId()); } if (other.hasDiagnostics()) { bitField0_ |= 0x00000002; diagnostics_ = other.diagnostics_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_; /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { if (applicationIdBuilder_ == null) { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } else { return applicationIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationId_ = value; onChanged(); } else { applicationIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder setApplicationId( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) { if (applicationIdBuilder_ == null) { applicationId_ = builderForValue.build(); onChanged(); } else { applicationIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ 
public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationId_ != null && applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) { applicationId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.newBuilder(applicationId_).mergeFrom(value).buildPartial(); } else { applicationId_ = value; } onChanged(); } else { applicationIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder clearApplicationId() { if (applicationIdBuilder_ == null) { applicationId_ = null; onChanged(); } else { applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { if (applicationIdBuilder_ != null) { return applicationIdBuilder_.getMessageOrBuilder(); } else { return applicationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> getApplicationIdFieldBuilder() { if (applicationIdBuilder_ == null) { applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>( getApplicationId(), getParentForChildren(), isClean()); applicationId_ = null; } return applicationIdBuilder_; } private java.lang.Object diagnostics_ = ""; /** * optional string diagnostics = 2; */ public boolean hasDiagnostics() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string diagnostics = 2; */ public java.lang.String getDiagnostics() { java.lang.Object ref = diagnostics_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { diagnostics_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string diagnostics = 2; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsBytes() { java.lang.Object ref = diagnostics_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); diagnostics_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string diagnostics = 2; */ public Builder setDiagnostics( 
java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; diagnostics_ = value; onChanged(); return this; } /** * optional string diagnostics = 2; */ public Builder clearDiagnostics() { bitField0_ = (bitField0_ & ~0x00000002); diagnostics_ = getDefaultInstance().getDiagnostics(); onChanged(); return this; } /** * optional string diagnostics = 2; */ public Builder setDiagnosticsBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; diagnostics_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.KillApplicationRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.KillApplicationRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public KillApplicationRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new 
KillApplicationRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface KillApplicationResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.KillApplicationResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional bool is_kill_completed = 1 [default = false]; */ boolean hasIsKillCompleted(); /** * optional bool is_kill_completed = 1 [default = false]; */ boolean getIsKillCompleted(); } /** * Protobuf type {@code hadoop.yarn.KillApplicationResponseProto} */ public static final class KillApplicationResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.KillApplicationResponseProto) KillApplicationResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use KillApplicationResponseProto.newBuilder() to construct. 
private KillApplicationResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private KillApplicationResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private KillApplicationResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { bitField0_ |= 0x00000001; isKillCompleted_ = input.readBool(); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationResponseProto_fieldAccessorTable 
.ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto.Builder.class); } private int bitField0_; public static final int IS_KILL_COMPLETED_FIELD_NUMBER = 1; private boolean isKillCompleted_; /** * optional bool is_kill_completed = 1 [default = false]; */ public boolean hasIsKillCompleted() { return ((bitField0_ & 0x00000001) != 0); } /** * optional bool is_kill_completed = 1 [default = false]; */ public boolean getIsKillCompleted() { return isKillCompleted_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeBool(1, isKillCompleted_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(1, isKillCompleted_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto) obj; if (hasIsKillCompleted() != other.hasIsKillCompleted()) return false; if (hasIsKillCompleted()) { if (getIsKillCompleted() != 
other.getIsKillCompleted()) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasIsKillCompleted()) { hash = (37 * hash) + IS_KILL_COMPLETED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getIsKillCompleted()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public 
static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parseFrom( 
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.KillApplicationResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.KillApplicationResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto.class, 
org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); isKillCompleted_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_KillApplicationResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.isKillCompleted_ = isKillCompleted_; to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); 
return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto.getDefaultInstance()) return this; if (other.hasIsKillCompleted()) { setIsKillCompleted(other.getIsKillCompleted()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private boolean isKillCompleted_ ; /** * optional bool is_kill_completed = 1 [default = false]; */ public boolean hasIsKillCompleted() { return ((bitField0_ & 0x00000001) != 0); } /** * optional bool is_kill_completed = 1 [default = false]; */ public boolean getIsKillCompleted() { return isKillCompleted_; } /** * optional bool is_kill_completed = 1 [default = false]; */ public Builder setIsKillCompleted(boolean value) { bitField0_ |= 0x00000001; isKillCompleted_ = value; onChanged(); return this; } /** * optional bool is_kill_completed = 1 [default = false]; */ public Builder clearIsKillCompleted() { bitField0_ = (bitField0_ & ~0x00000001); isKillCompleted_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.KillApplicationResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.KillApplicationResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new 
org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public KillApplicationResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new KillApplicationResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetClusterMetricsRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetClusterMetricsRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.GetClusterMetricsRequestProto} */ public static final class GetClusterMetricsRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetClusterMetricsRequestProto) GetClusterMetricsRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetClusterMetricsRequestProto.newBuilder() to construct. 
private GetClusterMetricsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetClusterMetricsRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetClusterMetricsRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto.class, 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseDelimitedFrom(java.io.InputStream input) throws 
java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetClusterMetricsRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetClusterMetricsRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override 
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetClusterMetricsRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetClusterMetricsRequestProto) private static final 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetClusterMetricsRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetClusterMetricsRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetClusterMetricsResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetClusterMetricsResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1; */ boolean hasClusterMetrics(); /** * optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto getClusterMetrics(); /** * optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProtoOrBuilder getClusterMetricsOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.GetClusterMetricsResponseProto} */ public static final class 
GetClusterMetricsResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetClusterMetricsResponseProto) GetClusterMetricsResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetClusterMetricsResponseProto.newBuilder() to construct. private GetClusterMetricsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetClusterMetricsResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetClusterMetricsResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = clusterMetrics_.toBuilder(); } clusterMetrics_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(clusterMetrics_); clusterMetrics_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } 
catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto.Builder.class); } private int bitField0_; public static final int CLUSTER_METRICS_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto clusterMetrics_; /** * optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1; */ public boolean hasClusterMetrics() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto getClusterMetrics() { return clusterMetrics_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.getDefaultInstance() : clusterMetrics_; } /** * optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProtoOrBuilder getClusterMetricsOrBuilder() { return clusterMetrics_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.getDefaultInstance() : clusterMetrics_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getClusterMetrics()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getClusterMetrics()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto) obj; if (hasClusterMetrics() != other.hasClusterMetrics()) return false; if (hasClusterMetrics()) { if (!getClusterMetrics() .equals(other.getClusterMetrics())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasClusterMetrics()) { hash = (37 * hash) + CLUSTER_METRICS_FIELD_NUMBER; hash = (53 * hash) + getClusterMetrics().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return 
hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetClusterMetricsResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetClusterMetricsResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getClusterMetricsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (clusterMetricsBuilder_ == null) { clusterMetrics_ = null; } else { clusterMetricsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterMetricsResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (clusterMetricsBuilder_ == null) { result.clusterMetrics_ = clusterMetrics_; } else { result.clusterMetrics_ = clusterMetricsBuilder_.build(); } to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { 
return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto.getDefaultInstance()) return this; if (other.hasClusterMetrics()) { mergeClusterMetrics(other.getClusterMetrics()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto clusterMetrics_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto, org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProtoOrBuilder> clusterMetricsBuilder_; /** * optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1; */ public boolean hasClusterMetrics() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto getClusterMetrics() { if (clusterMetricsBuilder_ == null) { return clusterMetrics_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.getDefaultInstance() : clusterMetrics_; } else { return clusterMetricsBuilder_.getMessage(); } } /** * optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1; */ public Builder setClusterMetrics(org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto value) { if (clusterMetricsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } clusterMetrics_ = value; onChanged(); } else { clusterMetricsBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1; */ public Builder setClusterMetrics( org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.Builder builderForValue) { if (clusterMetricsBuilder_ == null) { clusterMetrics_ = builderForValue.build(); onChanged(); } else { clusterMetricsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1; */ public Builder mergeClusterMetrics(org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto value) { if (clusterMetricsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && clusterMetrics_ != null && clusterMetrics_ != org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.getDefaultInstance()) { clusterMetrics_ = org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.newBuilder(clusterMetrics_).mergeFrom(value).buildPartial(); } else { clusterMetrics_ = value; } onChanged(); } else { clusterMetricsBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1; */ public Builder clearClusterMetrics() { if (clusterMetricsBuilder_ == null) { clusterMetrics_ = null; onChanged(); } else { clusterMetricsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.YarnClusterMetricsProto 
cluster_metrics = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.Builder getClusterMetricsBuilder() { bitField0_ |= 0x00000001; onChanged(); return getClusterMetricsFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProtoOrBuilder getClusterMetricsOrBuilder() { if (clusterMetricsBuilder_ != null) { return clusterMetricsBuilder_.getMessageOrBuilder(); } else { return clusterMetrics_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.getDefaultInstance() : clusterMetrics_; } } /** * optional .hadoop.yarn.YarnClusterMetricsProto cluster_metrics = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto, org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProtoOrBuilder> getClusterMetricsFieldBuilder() { if (clusterMetricsBuilder_ == null) { clusterMetricsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto, org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProtoOrBuilder>( getClusterMetrics(), getParentForChildren(), isClean()); clusterMetrics_ = null; } return clusterMetricsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetClusterMetricsResponseProto) } // 
@@protoc_insertion_point(class_scope:hadoop.yarn.GetClusterMetricsResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetClusterMetricsResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetClusterMetricsResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface MoveApplicationAcrossQueuesRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.MoveApplicationAcrossQueuesRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * required .hadoop.yarn.ApplicationIdProto application_id = 1; */ boolean hasApplicationId(); /** * required .hadoop.yarn.ApplicationIdProto application_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId(); /** * required .hadoop.yarn.ApplicationIdProto application_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder(); /** * 
required string target_queue = 2; */ boolean hasTargetQueue(); /** * required string target_queue = 2; */ java.lang.String getTargetQueue(); /** * required string target_queue = 2; */ org.apache.hadoop.thirdparty.protobuf.ByteString getTargetQueueBytes(); } /** * Protobuf type {@code hadoop.yarn.MoveApplicationAcrossQueuesRequestProto} */ public static final class MoveApplicationAcrossQueuesRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.MoveApplicationAcrossQueuesRequestProto) MoveApplicationAcrossQueuesRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use MoveApplicationAcrossQueuesRequestProto.newBuilder() to construct. private MoveApplicationAcrossQueuesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private MoveApplicationAcrossQueuesRequestProto() { targetQueue_ = ""; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private MoveApplicationAcrossQueuesRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = applicationId_.toBuilder(); } applicationId_ = 
input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(applicationId_); applicationId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; targetQueue_ = bs; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto.Builder.class); } private int bitField0_; public static final int APPLICATION_ID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; /** * required .hadoop.yarn.ApplicationIdProto application_id = 1; */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.ApplicationIdProto application_id = 1; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } /** * required .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } public static final int TARGET_QUEUE_FIELD_NUMBER = 2; private volatile java.lang.Object targetQueue_; /** * required string target_queue = 2; */ public boolean hasTargetQueue() { return ((bitField0_ & 0x00000002) != 0); } /** * required string target_queue = 2; */ public java.lang.String getTargetQueue() { java.lang.Object ref = targetQueue_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { targetQueue_ = s; } return s; } } /** * required string target_queue = 2; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getTargetQueueBytes() { java.lang.Object ref = targetQueue_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); targetQueue_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasApplicationId()) { memoizedIsInitialized = 0; return false; } if (!hasTargetQueue()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } 
@java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApplicationId()); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, targetQueue_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getApplicationId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, targetQueue_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto) obj; if (hasApplicationId() != other.hasApplicationId()) return false; if (hasApplicationId()) { if (!getApplicationId() .equals(other.getApplicationId())) return false; } if (hasTargetQueue() != other.hasTargetQueue()) return false; if (hasTargetQueue()) { if (!getTargetQueue() .equals(other.getTargetQueue())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationId()) { hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER; hash = (53 * hash) + 
getApplicationId().hashCode(); } if (hasTargetQueue()) { hash = (37 * hash) + TARGET_QUEUE_FIELD_NUMBER; hash = (53 * hash) + getTargetQueue().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.MoveApplicationAcrossQueuesRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.MoveApplicationAcrossQueuesRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto.Builder.class); } // 
Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationIdBuilder_ == null) { applicationId_ = null; } else { applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); targetQueue_ = ""; bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 
0; if (((from_bitField0_ & 0x00000001) != 0)) { if (applicationIdBuilder_ == null) { result.applicationId_ = applicationId_; } else { result.applicationId_ = applicationIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { to_bitField0_ |= 0x00000002; } result.targetQueue_ = targetQueue_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto.getDefaultInstance()) 
return this; if (other.hasApplicationId()) { mergeApplicationId(other.getApplicationId()); } if (other.hasTargetQueue()) { bitField0_ |= 0x00000002; targetQueue_ = other.targetQueue_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasApplicationId()) { return false; } if (!hasTargetQueue()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_; /** * required .hadoop.yarn.ApplicationIdProto application_id = 1; */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { if (applicationIdBuilder_ == null) { return applicationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } else { return applicationIdBuilder_.getMessage(); } } /** * required .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationId_ = value; onChanged(); } else { applicationIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder setApplicationId( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) { if (applicationIdBuilder_ == null) { applicationId_ = builderForValue.build(); onChanged(); } else { applicationIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationId_ != null && applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) { applicationId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.newBuilder(applicationId_).mergeFrom(value).buildPartial(); } else { applicationId_ = value; } onChanged(); } else { applicationIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder clearApplicationId() { if (applicationIdBuilder_ == null) { applicationId_ = null; onChanged(); } else { applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * required .hadoop.yarn.ApplicationIdProto application_id = 1; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationIdFieldBuilder().getBuilder(); } /** * required .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { if (applicationIdBuilder_ != null) { return applicationIdBuilder_.getMessageOrBuilder(); } else { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } } /** * required .hadoop.yarn.ApplicationIdProto application_id = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> getApplicationIdFieldBuilder() { if (applicationIdBuilder_ == null) { applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>( getApplicationId(), getParentForChildren(), isClean()); applicationId_ = null; } return applicationIdBuilder_; } private java.lang.Object targetQueue_ = ""; /** * required string target_queue = 2; */ public boolean hasTargetQueue() { return ((bitField0_ & 0x00000002) != 0); } /** * required string target_queue = 2; */ public java.lang.String getTargetQueue() { java.lang.Object ref = targetQueue_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { targetQueue_ = s; } return s; } else { return (java.lang.String) ref; } } /** * required string 
target_queue = 2; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getTargetQueueBytes() { java.lang.Object ref = targetQueue_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); targetQueue_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * required string target_queue = 2; */ public Builder setTargetQueue( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; targetQueue_ = value; onChanged(); return this; } /** * required string target_queue = 2; */ public Builder clearTargetQueue() { bitField0_ = (bitField0_ & ~0x00000002); targetQueue_ = getDefaultInstance().getTargetQueue(); onChanged(); return this; } /** * required string target_queue = 2; */ public Builder setTargetQueueBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; targetQueue_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.MoveApplicationAcrossQueuesRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.MoveApplicationAcrossQueuesRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto(); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER =
    new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
  @java.lang.Override
  public MoveApplicationAcrossQueuesRequestProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return new MoveApplicationAcrossQueuesRequestProto(input, extensionRegistry);
  }
};

public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}

// NOTE(review): machine-generated by protoc from yarn_service_protos.proto (see the
// "DO NOT EDIT" file header). Do not hand-modify this class; regenerate from the
// .proto definition instead.
public interface MoveApplicationAcrossQueuesResponseProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.MoveApplicationAcrossQueuesResponseProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
 * Protobuf type {@code hadoop.yarn.MoveApplicationAcrossQueuesResponseProto}
 */
public static final class MoveApplicationAcrossQueuesResponseProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.MoveApplicationAcrossQueuesResponseProto)
    MoveApplicationAcrossQueuesResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
  // Use MoveApplicationAcrossQueuesResponseProto.newBuilder() to construct.
  private MoveApplicationAcrossQueuesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) {
    super(builder);
  }
  private MoveApplicationAcrossQueuesResponseProto() {
  }

  @java.lang.Override
  public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Parsing constructor. This message declares no fields, so the read loop only
  // consumes tag 0 (end of message) and routes everything else into unknownFields.
  private MoveApplicationAcrossQueuesResponseProto(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
        org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      // Even on failure, retain what was read so callers can inspect the partial message.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto.Builder.class);
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Empty message: equality and hashing depend only on unknownFields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto)) {
      return super.equals(obj);
    }
    org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto) obj;

    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(
      java.nio.ByteBuffer data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(
      java.nio.ByteBuffer data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(byte[] data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(
      byte[] data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hadoop.yarn.MoveApplicationAcrossQueuesResponseProto}
   */
  public static final class Builder extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
      // @@protoc_insertion_point(builder_implements:hadoop.yarn.MoveApplicationAcrossQueuesResponseProto)
      org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProtoOrBuilder {
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto.Builder.class);
    }

    // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // No message fields, hence no field builders to force-initialize.
    private void maybeForceBuilderInitialization() {
      if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      return this;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_descriptor;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto getDefaultInstanceForType() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto.getDefaultInstance();
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto build() {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto buildPartial() {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto(this);
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto) {
        return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merging an empty message only carries over its unknown fields.
    public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto other) {
      if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto.getDefaultInstance()) return this;
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure, then surface the I/O cause.
        parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:hadoop.yarn.MoveApplicationAcrossQueuesResponseProto)
  }

  // @@protoc_insertion_point(class_scope:hadoop.yarn.MoveApplicationAcrossQueuesResponseProto)
  private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto();
  }

  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Deprecated direct field access; prefer the parser() accessor below.
  @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER =
      new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
    @java.lang.Override
    public MoveApplicationAcrossQueuesResponseProto parsePartialFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return new MoveApplicationAcrossQueuesResponseProto(input, extensionRegistry);
    }
  };

  public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}

public interface GetApplicationsRequestProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationsRequestProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /**
   * repeated string application_types = 1;
   */
  java.util.List getApplicationTypesList();
  /**
   * repeated string application_types = 1;
   */
  int getApplicationTypesCount();
  /**
   * repeated string application_types = 1;
   */
java.lang.String getApplicationTypes(int index); /** * repeated string application_types = 1; */ org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationTypesBytes(int index); /** * repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2; */ java.util.List getApplicationStatesList(); /** * repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2; */ int getApplicationStatesCount(); /** * repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getApplicationStates(int index); /** * repeated string users = 3; */ java.util.List getUsersList(); /** * repeated string users = 3; */ int getUsersCount(); /** * repeated string users = 3; */ java.lang.String getUsers(int index); /** * repeated string users = 3; */ org.apache.hadoop.thirdparty.protobuf.ByteString getUsersBytes(int index); /** * repeated string queues = 4; */ java.util.List getQueuesList(); /** * repeated string queues = 4; */ int getQueuesCount(); /** * repeated string queues = 4; */ java.lang.String getQueues(int index); /** * repeated string queues = 4; */ org.apache.hadoop.thirdparty.protobuf.ByteString getQueuesBytes(int index); /** * optional int64 limit = 5; */ boolean hasLimit(); /** * optional int64 limit = 5; */ long getLimit(); /** * optional int64 start_begin = 6; */ boolean hasStartBegin(); /** * optional int64 start_begin = 6; */ long getStartBegin(); /** * optional int64 start_end = 7; */ boolean hasStartEnd(); /** * optional int64 start_end = 7; */ long getStartEnd(); /** * optional int64 finish_begin = 8; */ boolean hasFinishBegin(); /** * optional int64 finish_begin = 8; */ long getFinishBegin(); /** * optional int64 finish_end = 9; */ boolean hasFinishEnd(); /** * optional int64 finish_end = 9; */ long getFinishEnd(); /** * repeated string applicationTags = 10; */ java.util.List getApplicationTagsList(); /** * repeated string applicationTags = 10; */ int 
getApplicationTagsCount(); /** * repeated string applicationTags = 10; */ java.lang.String getApplicationTags(int index); /** * repeated string applicationTags = 10; */ org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationTagsBytes(int index); /** * optional .hadoop.yarn.ApplicationsRequestScopeProto scope = 11 [default = ALL]; */ boolean hasScope(); /** * optional .hadoop.yarn.ApplicationsRequestScopeProto scope = 11 [default = ALL]; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto getScope(); /** * optional string name = 12; */ boolean hasName(); /** * optional string name = 12; */ java.lang.String getName(); /** * optional string name = 12; */ org.apache.hadoop.thirdparty.protobuf.ByteString getNameBytes(); } /** * Protobuf type {@code hadoop.yarn.GetApplicationsRequestProto} */ public static final class GetApplicationsRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationsRequestProto) GetApplicationsRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetApplicationsRequestProto.newBuilder() to construct. 
private GetApplicationsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetApplicationsRequestProto() { applicationTypes_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; applicationStates_ = java.util.Collections.emptyList(); users_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; queues_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; applicationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; scope_ = 0; name_ = ""; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetApplicationsRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000001) != 0)) { applicationTypes_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000001; } applicationTypes_.add(bs); break; } case 16: { int rawValue = input.readEnum(); @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto value = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(2, rawValue); } else { if (!((mutable_bitField0_ & 0x00000002) != 0)) { 
applicationStates_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } applicationStates_.add(rawValue); } break; } case 18: { int length = input.readRawVarint32(); int oldLimit = input.pushLimit(length); while(input.getBytesUntilLimit() > 0) { int rawValue = input.readEnum(); @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto value = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(2, rawValue); } else { if (!((mutable_bitField0_ & 0x00000002) != 0)) { applicationStates_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } applicationStates_.add(rawValue); } } input.popLimit(oldLimit); break; } case 26: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000004) != 0)) { users_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000004; } users_.add(bs); break; } case 34: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000008) != 0)) { queues_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000008; } queues_.add(bs); break; } case 40: { bitField0_ |= 0x00000001; limit_ = input.readInt64(); break; } case 48: { bitField0_ |= 0x00000002; startBegin_ = input.readInt64(); break; } case 56: { bitField0_ |= 0x00000004; startEnd_ = input.readInt64(); break; } case 64: { bitField0_ |= 0x00000008; finishBegin_ = input.readInt64(); break; } case 72: { bitField0_ |= 0x00000010; finishEnd_ = input.readInt64(); break; } case 82: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000200) != 0)) { applicationTags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000200; } applicationTags_.add(bs); break; } case 88: { int 
rawValue = input.readEnum(); @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto value = org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(11, rawValue); } else { bitField0_ |= 0x00000020; scope_ = rawValue; } break; } case 98: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000040; name_ = bs; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { applicationTypes_ = applicationTypes_.getUnmodifiableView(); } if (((mutable_bitField0_ & 0x00000002) != 0)) { applicationStates_ = java.util.Collections.unmodifiableList(applicationStates_); } if (((mutable_bitField0_ & 0x00000004) != 0)) { users_ = users_.getUnmodifiableView(); } if (((mutable_bitField0_ & 0x00000008) != 0)) { queues_ = queues_.getUnmodifiableView(); } if (((mutable_bitField0_ & 0x00000200) != 0)) { applicationTags_ = applicationTags_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsRequestProto_fieldAccessorTable 
.ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto.Builder.class); } private int bitField0_; public static final int APPLICATION_TYPES_FIELD_NUMBER = 1; private org.apache.hadoop.thirdparty.protobuf.LazyStringList applicationTypes_; /** * repeated string application_types = 1; */ public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getApplicationTypesList() { return applicationTypes_; } /** * repeated string application_types = 1; */ public int getApplicationTypesCount() { return applicationTypes_.size(); } /** * repeated string application_types = 1; */ public java.lang.String getApplicationTypes(int index) { return applicationTypes_.get(index); } /** * repeated string application_types = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationTypesBytes(int index) { return applicationTypes_.getByteString(index); } public static final int APPLICATION_STATES_FIELD_NUMBER = 2; private java.util.List applicationStates_; private static final org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter.Converter< java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto> applicationStates_converter_ = new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter.Converter< java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto>() { public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto convert(java.lang.Integer from) { @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.valueOf(from); return result == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.NEW : result; } }; /** * repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2; */ public java.util.List getApplicationStatesList() { return new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter< java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto>(applicationStates_, applicationStates_converter_); } /** * repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2; */ public int getApplicationStatesCount() { return applicationStates_.size(); } /** * repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getApplicationStates(int index) { return applicationStates_converter_.convert(applicationStates_.get(index)); } public static final int USERS_FIELD_NUMBER = 3; private org.apache.hadoop.thirdparty.protobuf.LazyStringList users_; /** * repeated string users = 3; */ public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getUsersList() { return users_; } /** * repeated string users = 3; */ public int getUsersCount() { return users_.size(); } /** * repeated string users = 3; */ public java.lang.String getUsers(int index) { return users_.get(index); } /** * repeated string users = 3; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getUsersBytes(int index) { return users_.getByteString(index); } public static final int QUEUES_FIELD_NUMBER = 4; private org.apache.hadoop.thirdparty.protobuf.LazyStringList queues_; /** * repeated string queues = 4; */ public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getQueuesList() { return queues_; } /** * repeated string queues = 4; */ public int getQueuesCount() { return queues_.size(); } /** * repeated string queues = 4; */ public java.lang.String getQueues(int index) { return queues_.get(index); } /** * repeated string queues = 4; */ public 
org.apache.hadoop.thirdparty.protobuf.ByteString getQueuesBytes(int index) { return queues_.getByteString(index); } public static final int LIMIT_FIELD_NUMBER = 5; private long limit_; /** * optional int64 limit = 5; */ public boolean hasLimit() { return ((bitField0_ & 0x00000001) != 0); } /** * optional int64 limit = 5; */ public long getLimit() { return limit_; } public static final int START_BEGIN_FIELD_NUMBER = 6; private long startBegin_; /** * optional int64 start_begin = 6; */ public boolean hasStartBegin() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int64 start_begin = 6; */ public long getStartBegin() { return startBegin_; } public static final int START_END_FIELD_NUMBER = 7; private long startEnd_; /** * optional int64 start_end = 7; */ public boolean hasStartEnd() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int64 start_end = 7; */ public long getStartEnd() { return startEnd_; } public static final int FINISH_BEGIN_FIELD_NUMBER = 8; private long finishBegin_; /** * optional int64 finish_begin = 8; */ public boolean hasFinishBegin() { return ((bitField0_ & 0x00000008) != 0); } /** * optional int64 finish_begin = 8; */ public long getFinishBegin() { return finishBegin_; } public static final int FINISH_END_FIELD_NUMBER = 9; private long finishEnd_; /** * optional int64 finish_end = 9; */ public boolean hasFinishEnd() { return ((bitField0_ & 0x00000010) != 0); } /** * optional int64 finish_end = 9; */ public long getFinishEnd() { return finishEnd_; } public static final int APPLICATIONTAGS_FIELD_NUMBER = 10; private org.apache.hadoop.thirdparty.protobuf.LazyStringList applicationTags_; /** * repeated string applicationTags = 10; */ public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getApplicationTagsList() { return applicationTags_; } /** * repeated string applicationTags = 10; */ public int getApplicationTagsCount() { return applicationTags_.size(); } /** * repeated string applicationTags = 10; */ public 
java.lang.String getApplicationTags(int index) { return applicationTags_.get(index); } /** * repeated string applicationTags = 10; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationTagsBytes(int index) { return applicationTags_.getByteString(index); } public static final int SCOPE_FIELD_NUMBER = 11; private int scope_; /** * optional .hadoop.yarn.ApplicationsRequestScopeProto scope = 11 [default = ALL]; */ public boolean hasScope() { return ((bitField0_ & 0x00000020) != 0); } /** * optional .hadoop.yarn.ApplicationsRequestScopeProto scope = 11 [default = ALL]; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto getScope() { @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto result = org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto.valueOf(scope_); return result == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto.ALL : result; } public static final int NAME_FIELD_NUMBER = 12; private volatile java.lang.Object name_; /** * optional string name = 12; */ public boolean hasName() { return ((bitField0_ & 0x00000040) != 0); } /** * optional string name = 12; */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * optional string name = 12; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return 
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < applicationTypes_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, applicationTypes_.getRaw(i)); } for (int i = 0; i < applicationStates_.size(); i++) { output.writeEnum(2, applicationStates_.get(i)); } for (int i = 0; i < users_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, users_.getRaw(i)); } for (int i = 0; i < queues_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, queues_.getRaw(i)); } if (((bitField0_ & 0x00000001) != 0)) { output.writeInt64(5, limit_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt64(6, startBegin_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeInt64(7, startEnd_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeInt64(8, finishBegin_); } if (((bitField0_ & 0x00000010) != 0)) { output.writeInt64(9, finishEnd_); } for (int i = 0; i < applicationTags_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 10, applicationTags_.getRaw(i)); } if (((bitField0_ & 0x00000020) != 0)) { output.writeEnum(11, scope_); } if (((bitField0_ & 0x00000040) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 12, name_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < applicationTypes_.size(); i++) { dataSize += 
computeStringSizeNoTag(applicationTypes_.getRaw(i)); } size += dataSize; size += 1 * getApplicationTypesList().size(); } { int dataSize = 0; for (int i = 0; i < applicationStates_.size(); i++) { dataSize += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSizeNoTag(applicationStates_.get(i)); } size += dataSize; size += 1 * applicationStates_.size(); } { int dataSize = 0; for (int i = 0; i < users_.size(); i++) { dataSize += computeStringSizeNoTag(users_.getRaw(i)); } size += dataSize; size += 1 * getUsersList().size(); } { int dataSize = 0; for (int i = 0; i < queues_.size(); i++) { dataSize += computeStringSizeNoTag(queues_.getRaw(i)); } size += dataSize; size += 1 * getQueuesList().size(); } if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(5, limit_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(6, startBegin_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(7, startEnd_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(8, finishBegin_); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(9, finishEnd_); } { int dataSize = 0; for (int i = 0; i < applicationTags_.size(); i++) { dataSize += computeStringSizeNoTag(applicationTags_.getRaw(i)); } size += dataSize; size += 1 * getApplicationTagsList().size(); } if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(11, scope_); } if (((bitField0_ & 0x00000040) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(12, name_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override 
// NOTE(review): generated protobuf code — comments only, tokens preserved verbatim.
  /**
   * Structural equality: all repeated fields, every present optional field,
   * and the unknown-field set must match.
   */
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto)) {
      return super.equals(obj);
    }
    org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto) obj;
    if (!getApplicationTypesList()
        .equals(other.getApplicationTypesList())) return false;
    // Enum list compared by raw int values.
    if (!applicationStates_.equals(other.applicationStates_)) return false;
    if (!getUsersList()
        .equals(other.getUsersList())) return false;
    if (!getQueuesList()
        .equals(other.getQueuesList())) return false;
    // For optional fields: presence must match, then value if present.
    if (hasLimit() != other.hasLimit()) return false;
    if (hasLimit()) {
      if (getLimit()
          != other.getLimit()) return false;
    }
    if (hasStartBegin() != other.hasStartBegin()) return false;
    if (hasStartBegin()) {
      if (getStartBegin()
          != other.getStartBegin()) return false;
    }
    if (hasStartEnd() != other.hasStartEnd()) return false;
    if (hasStartEnd()) {
      if (getStartEnd()
          != other.getStartEnd()) return false;
    }
    if (hasFinishBegin() != other.hasFinishBegin()) return false;
    if (hasFinishBegin()) {
      if (getFinishBegin()
          != other.getFinishBegin()) return false;
    }
    if (hasFinishEnd() != other.hasFinishEnd()) return false;
    if (hasFinishEnd()) {
      if (getFinishEnd()
          != other.getFinishEnd()) return false;
    }
    if (!getApplicationTagsList()
        .equals(other.getApplicationTagsList())) return false;
    if (hasScope() != other.hasScope()) return false;
    if (hasScope()) {
      if (scope_ != other.scope_) return false;
    }
    if (hasName() != other.hasName()) return false;
    if (hasName()) {
      if (!getName()
          .equals(other.getName())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  /**
   * Hash consistent with equals(): folds in the descriptor, each non-empty
   * repeated field and each present optional field, keyed by field number.
   * Memoized in memoizedHashCode (0 means "not computed yet").
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getApplicationTypesCount() > 0) {
      hash = (37 * hash) + APPLICATION_TYPES_FIELD_NUMBER;
      hash = (53 * hash) + getApplicationTypesList().hashCode();
    }
    if (getApplicationStatesCount() > 0) {
      hash = (37 * hash) + APPLICATION_STATES_FIELD_NUMBER;
      hash = (53 * hash) + applicationStates_.hashCode();
    }
    if (getUsersCount() > 0) {
      hash = (37 * hash) + USERS_FIELD_NUMBER;
      hash = (53 * hash) + getUsersList().hashCode();
    }
    if (getQueuesCount() > 0) {
      hash = (37 * hash) + QUEUES_FIELD_NUMBER;
      hash = (53 * hash) + getQueuesList().hashCode();
    }
    if (hasLimit()) {
      hash = (37 * hash) + LIMIT_FIELD_NUMBER;
      hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
          getLimit());
    }
    if (hasStartBegin()) {
      hash = (37 * hash) + START_BEGIN_FIELD_NUMBER;
      hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
          getStartBegin());
    }
    if (hasStartEnd()) {
      hash = (37 * hash) + START_END_FIELD_NUMBER;
      hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
          getStartEnd());
    }
    if (hasFinishBegin()) {
      hash = (37 * hash) + FINISH_BEGIN_FIELD_NUMBER;
      hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
          getFinishBegin());
    }
    if (hasFinishEnd()) {
      hash = (37 * hash) + FINISH_END_FIELD_NUMBER;
      hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
          getFinishEnd());
    }
    if (getApplicationTagsCount() > 0) {
      hash = (37 * hash) + APPLICATIONTAGS_FIELD_NUMBER;
      hash = (53 * hash) + getApplicationTagsList().hashCode();
    }
    if (hasScope()) {
      hash = (37 * hash) + SCOPE_FIELD_NUMBER;
      hash = (53 * hash) + scope_;
    }
    if (hasName()) {
      hash = (37 * hash) + NAME_FIELD_NUMBER;
      hash = (53 * hash) + getName().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  /** Parses a message from a ByteBuffer; throws on malformed input. */
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(
      java.nio.ByteBuffer data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Declaration continues on the next chunk block (run-on generated line).
  public static
// NOTE(review): generated protobuf code — comments only, tokens preserved verbatim.
// Standard generated parseFrom/parseDelimitedFrom overloads; all delegate to PARSER.
// (Completes the "public static" begun at the end of the previous run-on line.)
  org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(
      java.nio.ByteBuffer data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(byte[] data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(
      byte[] data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream variants wrap protobuf parse errors as IOException via parseWithIOException.
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  /** Returns a fresh Builder for this message type. */
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Returns a Builder pre-populated with the given prototype's fields. */
  public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // toBuilder(): default instance gets an empty Builder, otherwise copy this message.
  // Expression continues on the next chunk block (run-on generated line).
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ?
// NOTE(review): generated protobuf code (protoc) — do not hand-edit logic; comments only.
// NOTE(review): generic type parameters were stripped by the HTML extraction this
// chunk came from (e.g. GeneratedMessageV3.Builder without <Builder>, raw
// java.util.List / ArrayList); tokens are preserved exactly as extracted.
// Completes toBuilder() begun on the previous run-on line.
        new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetApplicationsRequestProto}
   *
   * Mutable builder for GetApplicationsRequestProto. In the builder, bitField0_
   * uses one bit per field in declaration order (0x1..0x800), which differs from
   * the message's own bitField0_ layout (presence bits for optional fields only);
   * buildPartial() translates between the two.
   */
  public static final class Builder extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
      // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationsRequestProto)
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProtoOrBuilder {
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto.Builder.class);
    }

    // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // No message/group fields here, so nothing to force-initialize.
      if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    /** Resets every field to its default and clears all presence bits. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      applicationTypes_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000001);
      applicationStates_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000002);
      users_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000004);
      queues_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000008);
      limit_ = 0L;
      bitField0_ = (bitField0_ & ~0x00000010);
      startBegin_ = 0L;
      bitField0_ = (bitField0_ & ~0x00000020);
      startEnd_ = 0L;
      bitField0_ = (bitField0_ & ~0x00000040);
      finishBegin_ = 0L;
      bitField0_ = (bitField0_ & ~0x00000080);
      finishEnd_ = 0L;
      bitField0_ = (bitField0_ & ~0x00000100);
      applicationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000200);
      scope_ = 0;
      bitField0_ = (bitField0_ & ~0x00000400);
      name_ = "";
      bitField0_ = (bitField0_ & ~0x00000800);
      return this;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsRequestProto_descriptor;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto getDefaultInstanceForType() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto.getDefaultInstance();
    }

    /** Builds the message, throwing if required fields are missing (none here). */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto build() {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    /**
     * Builds without the initialization check. Repeated fields are frozen into
     * unmodifiable views (and their builder bits cleared so a later mutation
     * copies); scalar presence bits are remapped from the builder's bit layout
     * to the message's.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto buildPartial() {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        applicationTypes_ = applicationTypes_.getUnmodifiableView();
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.applicationTypes_ = applicationTypes_;
      if (((bitField0_ & 0x00000002) != 0)) {
        applicationStates_ = java.util.Collections.unmodifiableList(applicationStates_);
        bitField0_ = (bitField0_ & ~0x00000002);
      }
      result.applicationStates_ = applicationStates_;
      if (((bitField0_ & 0x00000004) != 0)) {
        users_ = users_.getUnmodifiableView();
        bitField0_ = (bitField0_ & ~0x00000004);
      }
      result.users_ = users_;
      if (((bitField0_ & 0x00000008) != 0)) {
        queues_ = queues_.getUnmodifiableView();
        bitField0_ = (bitField0_ & ~0x00000008);
      }
      result.queues_ = queues_;
      if (((from_bitField0_ & 0x00000010) != 0)) {
        result.limit_ = limit_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000020) != 0)) {
        result.startBegin_ = startBegin_;
        to_bitField0_ |= 0x00000002;
      }
      if (((from_bitField0_ & 0x00000040) != 0)) {
        result.startEnd_ = startEnd_;
        to_bitField0_ |= 0x00000004;
      }
      if (((from_bitField0_ & 0x00000080) != 0)) {
        result.finishBegin_ = finishBegin_;
        to_bitField0_ |= 0x00000008;
      }
      if (((from_bitField0_ & 0x00000100) != 0)) {
        result.finishEnd_ = finishEnd_;
        to_bitField0_ |= 0x00000010;
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        applicationTags_ = applicationTags_.getUnmodifiableView();
        bitField0_ = (bitField0_ & ~0x00000200);
      }
      result.applicationTags_ = applicationTags_;
      if (((from_bitField0_ & 0x00000400) != 0)) {
        to_bitField0_ |= 0x00000020;
      }
      result.scope_ = scope_;
      if (((from_bitField0_ & 0x00000800) != 0)) {
        to_bitField0_ |= 0x00000040;
      }
      result.name_ = name_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    /** Dynamic-dispatch merge: uses the typed overload when possible. */
    @java.lang.Override
    public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto) {
        return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    /**
     * Merges another message into this builder: repeated fields are appended
     * (lists are aliased, not copied, when this builder's list is empty),
     * present optional fields overwrite.
     */
    public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto other) {
      if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto.getDefaultInstance()) return this;
      if (!other.applicationTypes_.isEmpty()) {
        if (applicationTypes_.isEmpty()) {
          applicationTypes_ = other.applicationTypes_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureApplicationTypesIsMutable();
          applicationTypes_.addAll(other.applicationTypes_);
        }
        onChanged();
      }
      if (!other.applicationStates_.isEmpty()) {
        if (applicationStates_.isEmpty()) {
          applicationStates_ = other.applicationStates_;
          bitField0_ = (bitField0_ & ~0x00000002);
        } else {
          ensureApplicationStatesIsMutable();
          applicationStates_.addAll(other.applicationStates_);
        }
        onChanged();
      }
      if (!other.users_.isEmpty()) {
        if (users_.isEmpty()) {
          users_ = other.users_;
          bitField0_ = (bitField0_ & ~0x00000004);
        } else {
          ensureUsersIsMutable();
          users_.addAll(other.users_);
        }
        onChanged();
      }
      if (!other.queues_.isEmpty()) {
        if (queues_.isEmpty()) {
          queues_ = other.queues_;
          bitField0_ = (bitField0_ & ~0x00000008);
        } else {
          ensureQueuesIsMutable();
          queues_.addAll(other.queues_);
        }
        onChanged();
      }
      if (other.hasLimit()) {
        setLimit(other.getLimit());
      }
      if (other.hasStartBegin()) {
        setStartBegin(other.getStartBegin());
      }
      if (other.hasStartEnd()) {
        setStartEnd(other.getStartEnd());
      }
      if (other.hasFinishBegin()) {
        setFinishBegin(other.getFinishBegin());
      }
      if (other.hasFinishEnd()) {
        setFinishEnd(other.getFinishEnd());
      }
      if (!other.applicationTags_.isEmpty()) {
        if (applicationTags_.isEmpty()) {
          applicationTags_ = other.applicationTags_;
          bitField0_ = (bitField0_ & ~0x00000200);
        } else {
          ensureApplicationTagsIsMutable();
          applicationTags_.addAll(other.applicationTags_);
        }
        onChanged();
      }
      if (other.hasScope()) {
        setScope(other.getScope());
      }
      if (other.hasName()) {
        bitField0_ |= 0x00000800;
        name_ = other.name_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    /**
     * Parses from a stream and merges the result. On a parse error, any fields
     * decoded before the failure are still merged (see the finally block).
     */
    @java.lang.Override
    public Builder mergeFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // One bit per field in declaration order; see class comment.
    private int bitField0_;

    private org.apache.hadoop.thirdparty.protobuf.LazyStringList applicationTypes_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
    // Copy-on-write: replace the (possibly aliased/frozen) list with a mutable copy.
    private void ensureApplicationTypesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        applicationTypes_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(applicationTypes_);
        bitField0_ |= 0x00000001;
      }
    }
    /** repeated string application_types = 1; */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getApplicationTypesList() {
      return applicationTypes_.getUnmodifiableView();
    }
    /** repeated string application_types = 1; */
    public int getApplicationTypesCount() {
      return applicationTypes_.size();
    }
    /** repeated string application_types = 1; */
    public java.lang.String getApplicationTypes(int index) {
      return applicationTypes_.get(index);
    }
    /** repeated string application_types = 1; */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTypesBytes(int index) {
      return applicationTypes_.getByteString(index);
    }
    /** repeated string application_types = 1; */
    public Builder setApplicationTypes(
        int index, java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureApplicationTypesIsMutable();
      applicationTypes_.set(index, value);
      onChanged();
      return this;
    }
    /** repeated string application_types = 1; */
    public Builder addApplicationTypes(
        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureApplicationTypesIsMutable();
      applicationTypes_.add(value);
      onChanged();
      return this;
    }
    /** repeated string application_types = 1; */
    public Builder addAllApplicationTypes(
        java.lang.Iterable values) {
      ensureApplicationTypesIsMutable();
      org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
          values, applicationTypes_);
      onChanged();
      return this;
    }
    /** repeated string application_types = 1; */
    public Builder clearApplicationTypes() {
      applicationTypes_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /** repeated string application_types = 1; */
    public Builder addApplicationTypesBytes(
        org.apache.hadoop.thirdparty.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureApplicationTypesIsMutable();
      applicationTypes_.add(value);
      onChanged();
      return this;
    }

    // Enum list stored as raw ints; converted via applicationStates_converter_.
    private java.util.List applicationStates_ =
      java.util.Collections.emptyList();
    private void ensureApplicationStatesIsMutable() {
      if (!((bitField0_ & 0x00000002) != 0)) {
        applicationStates_ = new java.util.ArrayList(applicationStates_);
        bitField0_ |= 0x00000002;
      }
    }
    /** repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2; */
    public java.util.List getApplicationStatesList() {
      return new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter<
          java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto>(applicationStates_, applicationStates_converter_);
    }
    /** repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2; */
    public int getApplicationStatesCount() {
      return applicationStates_.size();
    }
    /** repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2; */
    public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getApplicationStates(int index) {
      return applicationStates_converter_.convert(applicationStates_.get(index));
    }
    /** repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2; */
    public Builder setApplicationStates(
        int index, org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureApplicationStatesIsMutable();
      applicationStates_.set(index, value.getNumber());
      onChanged();
      return this;
    }
    /** repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2; */
    public Builder addApplicationStates(org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureApplicationStatesIsMutable();
      applicationStates_.add(value.getNumber());
      onChanged();
      return this;
    }
    /** repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2; */
    public Builder addAllApplicationStates(
        java.lang.Iterable values) {
      ensureApplicationStatesIsMutable();
      for (org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto value : values) {
        applicationStates_.add(value.getNumber());
      }
      onChanged();
      return this;
    }
    /** repeated .hadoop.yarn.YarnApplicationStateProto application_states = 2; */
    public Builder clearApplicationStates() {
      applicationStates_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    private org.apache.hadoop.thirdparty.protobuf.LazyStringList users_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
    private void ensureUsersIsMutable() {
      if (!((bitField0_ & 0x00000004) != 0)) {
        users_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(users_);
        bitField0_ |= 0x00000004;
      }
    }
    /** repeated string users = 3; */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getUsersList() {
      return users_.getUnmodifiableView();
    }
    /** repeated string users = 3; */
    public int getUsersCount() {
      return users_.size();
    }
    /** repeated string users = 3; */
    public java.lang.String getUsers(int index) {
      return users_.get(index);
    }
    /** repeated string users = 3; */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getUsersBytes(int index) {
      return users_.getByteString(index);
    }
    /** repeated string users = 3; */
    public Builder setUsers(
        int index, java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureUsersIsMutable();
      users_.set(index, value);
      onChanged();
      return this;
    }
    /** repeated string users = 3; */
    public Builder addUsers(
        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureUsersIsMutable();
      users_.add(value);
      onChanged();
      return this;
    }
    /** repeated string users = 3; */
    public Builder addAllUsers(
        java.lang.Iterable values) {
      ensureUsersIsMutable();
      org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
          values, users_);
      onChanged();
      return this;
    }
    /** repeated string users = 3; */
    public Builder clearUsers() {
      users_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /** repeated string users = 3; */
    public Builder addUsersBytes(
        org.apache.hadoop.thirdparty.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureUsersIsMutable();
      users_.add(value);
      onChanged();
      return this;
    }

    private org.apache.hadoop.thirdparty.protobuf.LazyStringList queues_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
    private void ensureQueuesIsMutable() {
      if (!((bitField0_ & 0x00000008) != 0)) {
        queues_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(queues_);
        bitField0_ |= 0x00000008;
      }
    }
    /** repeated string queues = 4; */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getQueuesList() {
      return queues_.getUnmodifiableView();
    }
    /** repeated string queues = 4; */
    public int getQueuesCount() {
      return queues_.size();
    }
    /** repeated string queues = 4; */
    public java.lang.String getQueues(int index) {
      return queues_.get(index);
    }
    /** repeated string queues = 4; */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueuesBytes(int index) {
      return queues_.getByteString(index);
    }
    /** repeated string queues = 4; */
    public Builder setQueues(
        int index, java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureQueuesIsMutable();
      queues_.set(index, value);
      onChanged();
      return this;
    }
    /** repeated string queues = 4; */
    public Builder addQueues(
        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureQueuesIsMutable();
      queues_.add(value);
      onChanged();
      return this;
    }
    /** repeated string queues = 4; */
    public Builder addAllQueues(
        java.lang.Iterable values) {
      ensureQueuesIsMutable();
      org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
          values, queues_);
      onChanged();
      return this;
    }
    /** repeated string queues = 4; */
    public Builder clearQueues() {
      queues_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /** repeated string queues = 4; */
    public Builder addQueuesBytes(
        org.apache.hadoop.thirdparty.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureQueuesIsMutable();
      queues_.add(value);
      onChanged();
      return this;
    }

    private long limit_ ;
    /** optional int64 limit = 5; */
    public boolean hasLimit() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /** optional int64 limit = 5; */
    public long getLimit() {
      return limit_;
    }
    /** optional int64 limit = 5; */
    public Builder setLimit(long value) {
      bitField0_ |= 0x00000010;
      limit_ = value;
      onChanged();
      return this;
    }
    /** optional int64 limit = 5; */
    public Builder clearLimit() {
      bitField0_ = (bitField0_ & ~0x00000010);
      limit_ = 0L;
      onChanged();
      return this;
    }

    private long startBegin_ ;
    /** optional int64 start_begin = 6; */
    public boolean hasStartBegin() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /** optional int64 start_begin = 6; */
    public long getStartBegin() {
      return startBegin_;
    }
    /** optional int64 start_begin = 6; */
    public Builder setStartBegin(long value) {
      bitField0_ |= 0x00000020;
      startBegin_ = value;
      onChanged();
      return this;
    }
    /** optional int64 start_begin = 6; */
    public Builder clearStartBegin() {
      bitField0_ = (bitField0_ & ~0x00000020);
      startBegin_ = 0L;
      onChanged();
      return this;
    }

    private long startEnd_ ;
    /** optional int64 start_end = 7; */
    public boolean hasStartEnd() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /** optional int64 start_end = 7; */
    public long getStartEnd() {
      return startEnd_;
    }
    /** optional int64 start_end = 7; */
    public Builder setStartEnd(long value) {
      bitField0_ |= 0x00000040;
      startEnd_ = value;
      onChanged();
      return this;
    }
    /** optional int64 start_end = 7; */
    public Builder clearStartEnd() {
      bitField0_ = (bitField0_ & ~0x00000040);
      startEnd_ = 0L;
      onChanged();
      return this;
    }

    private long finishBegin_ ;
    /** optional int64 finish_begin = 8; */
    public boolean hasFinishBegin() {
      return ((bitField0_ & 0x00000080) != 0);
    }
    /** optional int64 finish_begin = 8; */
    public long getFinishBegin() {
      return finishBegin_;
    }
    /** optional int64 finish_begin = 8; */
    public Builder setFinishBegin(long value) {
      bitField0_ |= 0x00000080;
      finishBegin_ = value;
      onChanged();
      return this;
    }
    /** optional int64 finish_begin = 8; */
    public Builder clearFinishBegin() {
      bitField0_ = (bitField0_ & ~0x00000080);
      finishBegin_ = 0L;
      onChanged();
      return this;
    }

    private long finishEnd_ ;
    /** optional int64 finish_end = 9; */
    public boolean hasFinishEnd() {
      return ((bitField0_ & 0x00000100) != 0);
    }
    /** optional int64 finish_end = 9; */
    public long getFinishEnd() {
      return finishEnd_;
    }
    /** optional int64 finish_end = 9; */
    public Builder setFinishEnd(long value) {
      bitField0_ |= 0x00000100;
      finishEnd_ = value;
      onChanged();
      return this;
    }
    /** optional int64 finish_end = 9; */
    public Builder clearFinishEnd() {
      bitField0_ = (bitField0_ & ~0x00000100);
      finishEnd_ = 0L;
      onChanged();
      return this;
    }

    private org.apache.hadoop.thirdparty.protobuf.LazyStringList applicationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
    private void ensureApplicationTagsIsMutable() {
      if (!((bitField0_ & 0x00000200) != 0)) {
        applicationTags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(applicationTags_);
        bitField0_ |= 0x00000200;
      }
    }
    /** repeated string applicationTags = 10; */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getApplicationTagsList() {
      return applicationTags_.getUnmodifiableView();
    }
    /** repeated string applicationTags = 10; */
    public int getApplicationTagsCount() {
      return applicationTags_.size();
    }
    /** repeated string applicationTags = 10; */
    public java.lang.String getApplicationTags(int index) {
      return applicationTags_.get(index);
    }
    /** repeated string applicationTags = 10; */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTagsBytes(int index) {
      return applicationTags_.getByteString(index);
    }
    /** repeated string applicationTags = 10; */
    public Builder setApplicationTags(
        int index, java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureApplicationTagsIsMutable();
      applicationTags_.set(index, value);
      onChanged();
      return this;
    }
    /** repeated string applicationTags = 10; */
    public Builder addApplicationTags(
        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureApplicationTagsIsMutable();
      applicationTags_.add(value);
      onChanged();
      return this;
    }
    /** repeated string applicationTags = 10; */
    public Builder addAllApplicationTags(
        java.lang.Iterable values) {
      ensureApplicationTagsIsMutable();
      org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
          values, applicationTags_);
      onChanged();
      return this;
    }
    /** repeated string applicationTags = 10; */
    public Builder clearApplicationTags() {
      applicationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000200);
      onChanged();
      return this;
    }
    /** repeated string applicationTags = 10; */
    public Builder addApplicationTagsBytes(
        org.apache.hadoop.thirdparty.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureApplicationTagsIsMutable();
      applicationTags_.add(value);
      onChanged();
      return this;
    }

    // Enum stored as raw int; 0 corresponds to the declared default (ALL).
    private int scope_ = 0;
    /** optional .hadoop.yarn.ApplicationsRequestScopeProto scope = 11 [default = ALL]; */
    public boolean hasScope() {
      return ((bitField0_ & 0x00000400) != 0);
    }
    /** optional .hadoop.yarn.ApplicationsRequestScopeProto scope = 11 [default = ALL]; */
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto getScope() {
      @SuppressWarnings("deprecation")
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto result = org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto.valueOf(scope_);
      // Unknown enum numbers fall back to the field default, ALL.
      return result == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto.ALL : result;
    }
    /** optional .hadoop.yarn.ApplicationsRequestScopeProto scope = 11 [default = ALL]; */
    public Builder setScope(org.apache.hadoop.yarn.proto.YarnServiceProtos.ApplicationsRequestScopeProto value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000400;
      scope_ = value.getNumber();
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ApplicationsRequestScopeProto scope = 11 [default = ALL]; */
    public Builder clearScope() {
      bitField0_ = (bitField0_ & ~0x00000400);
      scope_ = 0;
      onChanged();
      return this;
    }

    // Either a String or a ByteString; lazily decoded/encoded by the accessors.
    private java.lang.Object name_ = "";
    /** optional string name = 12; */
    public boolean hasName() {
      return ((bitField0_ & 0x00000800) != 0);
    }
    /** optional string name = 12; */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs =
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String only if the bytes were valid UTF-8.
        if (bs.isValidUtf8()) {
          name_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /** optional string name = 12; */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b =
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
    /** optional string name = 12; */
    public Builder setName(
        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000800;
      name_ = value;
      onChanged();
      return this;
    }
    /** optional string name = 12; */
    public Builder clearName() {
      bitField0_ = (bitField0_ & ~0x00000800);
      name_ = getDefaultInstance().getName();
      onChanged();
      return this;
    }
    /** optional string name = 12; */
    public Builder setNameBytes(
        org.apache.hadoop.thirdparty.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000800;
      name_ = value;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationsRequestProto)
  }

  // @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationsRequestProto)
  // Singleton default instance; shared by getDefaultInstance()/newBuilder().
  private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto();
  }

  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Deprecated public parser field; prefer parser(). Kept for generated-API compatibility.
  @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
      PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
    @java.lang.Override
    public GetApplicationsRequestProto parsePartialFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return new GetApplicationsRequestProto(input, extensionRegistry);
    }
  };

  // Declaration continues on the next chunk block (run-on generated line).
  public static
org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetApplicationsResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationsResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ java.util.List getApplicationsList(); /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplications(int index); /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ int getApplicationsCount(); /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ java.util.List getApplicationsOrBuilderList(); /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder getApplicationsOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.GetApplicationsResponseProto} */ public static final class GetApplicationsResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationsResponseProto) GetApplicationsResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetApplicationsResponseProto.newBuilder() to construct. 
private GetApplicationsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetApplicationsResponseProto() { applications_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetApplicationsResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { applications_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } applications_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { applications_ = java.util.Collections.unmodifiableList(applications_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto.Builder.class); } public static final int APPLICATIONS_FIELD_NUMBER = 1; private java.util.List applications_; /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public java.util.List getApplicationsList() { return applications_; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public java.util.List getApplicationsOrBuilderList() { return applications_; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public int getApplicationsCount() { return applications_.size(); } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplications(int index) { return applications_.get(index); } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder getApplicationsOrBuilder( int index) { return applications_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getApplicationsCount(); i++) { if (!getApplications(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void 
writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < applications_.size(); i++) { output.writeMessage(1, applications_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < applications_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, applications_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto) obj; if (!getApplicationsList() .equals(other.getApplicationsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getApplicationsCount() > 0) { hash = (37 * hash) + APPLICATIONS_FIELD_NUMBER; hash = (53 * hash) + getApplicationsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetApplicationsResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationsResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationsBuilder_ == null) { applications_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { 
applicationsBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationsResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto(this); int from_bitField0_ = bitField0_; if (applicationsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { applications_ = java.util.Collections.unmodifiableList(applications_); bitField0_ = (bitField0_ & ~0x00000001); } result.applications_ = applications_; } else { result.applications_ = applicationsBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto.getDefaultInstance()) return this; if (applicationsBuilder_ == null) { if (!other.applications_.isEmpty()) { if (applications_.isEmpty()) { applications_ = other.applications_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureApplicationsIsMutable(); applications_.addAll(other.applications_); } onChanged(); } } else { if (!other.applications_.isEmpty()) { if (applicationsBuilder_.isEmpty()) { applicationsBuilder_.dispose(); applicationsBuilder_ = null; applications_ = other.applications_; bitField0_ = (bitField0_ & ~0x00000001); applicationsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getApplicationsFieldBuilder() : null; } else { applicationsBuilder_.addAllMessages(other.applications_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getApplicationsCount(); i++) { if (!getApplications(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List applications_ = java.util.Collections.emptyList(); private void ensureApplicationsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { applications_ = new java.util.ArrayList(applications_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder> applicationsBuilder_; /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public java.util.List getApplicationsList() { if (applicationsBuilder_ == null) { return java.util.Collections.unmodifiableList(applications_); } else { return applicationsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ 
public int getApplicationsCount() { if (applicationsBuilder_ == null) { return applications_.size(); } else { return applicationsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplications(int index) { if (applicationsBuilder_ == null) { return applications_.get(index); } else { return applicationsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public Builder setApplications( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto value) { if (applicationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationsIsMutable(); applications_.set(index, value); onChanged(); } else { applicationsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public Builder setApplications( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder builderForValue) { if (applicationsBuilder_ == null) { ensureApplicationsIsMutable(); applications_.set(index, builderForValue.build()); onChanged(); } else { applicationsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public Builder addApplications(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto value) { if (applicationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationsIsMutable(); applications_.add(value); onChanged(); } else { applicationsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public Builder addApplications( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto value) { if (applicationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } 
ensureApplicationsIsMutable(); applications_.add(index, value); onChanged(); } else { applicationsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public Builder addApplications( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder builderForValue) { if (applicationsBuilder_ == null) { ensureApplicationsIsMutable(); applications_.add(builderForValue.build()); onChanged(); } else { applicationsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public Builder addApplications( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder builderForValue) { if (applicationsBuilder_ == null) { ensureApplicationsIsMutable(); applications_.add(index, builderForValue.build()); onChanged(); } else { applicationsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public Builder addAllApplications( java.lang.Iterable values) { if (applicationsBuilder_ == null) { ensureApplicationsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, applications_); onChanged(); } else { applicationsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public Builder clearApplications() { if (applicationsBuilder_ == null) { applications_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { applicationsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public Builder removeApplications(int index) { if (applicationsBuilder_ == null) { ensureApplicationsIsMutable(); applications_.remove(index); onChanged(); } else { applicationsBuilder_.remove(index); } return this; } /** * repeated 
.hadoop.yarn.ApplicationReportProto applications = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder getApplicationsBuilder( int index) { return getApplicationsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder getApplicationsOrBuilder( int index) { if (applicationsBuilder_ == null) { return applications_.get(index); } else { return applicationsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public java.util.List getApplicationsOrBuilderList() { if (applicationsBuilder_ != null) { return applicationsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(applications_); } } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder addApplicationsBuilder() { return getApplicationsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder addApplicationsBuilder( int index) { return getApplicationsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 1; */ public java.util.List getApplicationsBuilderList() { return getApplicationsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder> getApplicationsFieldBuilder() { if 
(applicationsBuilder_ == null) { applicationsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder>( applications_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); applications_ = null; } return applicationsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationsResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationsResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetApplicationsResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetApplicationsResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } 
@java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetClusterNodesRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetClusterNodesRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.NodeStateProto nodeStates = 1; */ java.util.List getNodeStatesList(); /** * repeated .hadoop.yarn.NodeStateProto nodeStates = 1; */ int getNodeStatesCount(); /** * repeated .hadoop.yarn.NodeStateProto nodeStates = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto getNodeStates(int index); } /** * Protobuf type {@code hadoop.yarn.GetClusterNodesRequestProto} */ public static final class GetClusterNodesRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetClusterNodesRequestProto) GetClusterNodesRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetClusterNodesRequestProto.newBuilder() to construct. 
private GetClusterNodesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetClusterNodesRequestProto() { nodeStates_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetClusterNodesRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int rawValue = input.readEnum(); @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto value = org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { if (!((mutable_bitField0_ & 0x00000001) != 0)) { nodeStates_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } nodeStates_.add(rawValue); } break; } case 10: { int length = input.readRawVarint32(); int oldLimit = input.pushLimit(length); while(input.getBytesUntilLimit() > 0) { int rawValue = input.readEnum(); @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto value = org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { if (!((mutable_bitField0_ & 0x00000001) != 0)) { nodeStates_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } 
nodeStates_.add(rawValue); } } input.popLimit(oldLimit); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { nodeStates_ = java.util.Collections.unmodifiableList(nodeStates_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto.Builder.class); } public static final int NODESTATES_FIELD_NUMBER = 1; private java.util.List nodeStates_; private static final org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter.Converter< java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto> nodeStates_converter_ = new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter.Converter< java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto>() { public org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto convert(java.lang.Integer from) { @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto result = 
org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.valueOf(from); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.NS_NEW : result; } }; /** * repeated .hadoop.yarn.NodeStateProto nodeStates = 1; */ public java.util.List getNodeStatesList() { return new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter< java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto>(nodeStates_, nodeStates_converter_); } /** * repeated .hadoop.yarn.NodeStateProto nodeStates = 1; */ public int getNodeStatesCount() { return nodeStates_.size(); } /** * repeated .hadoop.yarn.NodeStateProto nodeStates = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto getNodeStates(int index) { return nodeStates_converter_.convert(nodeStates_.get(index)); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < nodeStates_.size(); i++) { output.writeEnum(1, nodeStates_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < nodeStates_.size(); i++) { dataSize += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSizeNoTag(nodeStates_.get(i)); } size += dataSize; size += 1 * nodeStates_.size(); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto)) { return super.equals(obj); } 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto) obj; if (!nodeStates_.equals(other.nodeStates_)) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getNodeStatesCount() > 0) { hash = (37 * hash) + NODESTATES_FIELD_NUMBER; hash = (53 * hash) + nodeStates_.hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom(byte[] data) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public 
static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetClusterNodesRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetClusterNodesRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); nodeStates_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto(this); int from_bitField0_ = bitField0_; if (((bitField0_ & 0x00000001) != 0)) { nodeStates_ = 
java.util.Collections.unmodifiableList(nodeStates_); bitField0_ = (bitField0_ & ~0x00000001); } result.nodeStates_ = nodeStates_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto.getDefaultInstance()) return this; if (!other.nodeStates_.isEmpty()) { if (nodeStates_.isEmpty()) { nodeStates_ = other.nodeStates_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureNodeStatesIsMutable(); nodeStates_.addAll(other.nodeStates_); } onChanged(); } this.mergeUnknownFields(other.unknownFields); 
onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List nodeStates_ = java.util.Collections.emptyList(); private void ensureNodeStatesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { nodeStates_ = new java.util.ArrayList(nodeStates_); bitField0_ |= 0x00000001; } } /** * repeated .hadoop.yarn.NodeStateProto nodeStates = 1; */ public java.util.List getNodeStatesList() { return new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter< java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto>(nodeStates_, nodeStates_converter_); } /** * repeated .hadoop.yarn.NodeStateProto nodeStates = 1; */ public int getNodeStatesCount() { return nodeStates_.size(); } /** * repeated .hadoop.yarn.NodeStateProto nodeStates = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto getNodeStates(int index) { return nodeStates_converter_.convert(nodeStates_.get(index)); } /** * repeated .hadoop.yarn.NodeStateProto nodeStates = 1; */ public Builder setNodeStates( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto value) { if (value == null) { throw new NullPointerException(); } ensureNodeStatesIsMutable(); nodeStates_.set(index, value.getNumber()); 
onChanged(); return this; } /** * repeated .hadoop.yarn.NodeStateProto nodeStates = 1; */ public Builder addNodeStates(org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto value) { if (value == null) { throw new NullPointerException(); } ensureNodeStatesIsMutable(); nodeStates_.add(value.getNumber()); onChanged(); return this; } /** * repeated .hadoop.yarn.NodeStateProto nodeStates = 1; */ public Builder addAllNodeStates( java.lang.Iterable values) { ensureNodeStatesIsMutable(); for (org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto value : values) { nodeStates_.add(value.getNumber()); } onChanged(); return this; } /** * repeated .hadoop.yarn.NodeStateProto nodeStates = 1; */ public Builder clearNodeStates() { nodeStates_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetClusterNodesRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetClusterNodesRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetClusterNodesRequestProto parsePartialFrom( 
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetClusterNodesRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetClusterNodesResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetClusterNodesResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ java.util.List getNodeReportsList(); /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getNodeReports(int index); /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ int getNodeReportsCount(); /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ java.util.List getNodeReportsOrBuilderList(); /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder getNodeReportsOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.GetClusterNodesResponseProto} */ public static final class GetClusterNodesResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetClusterNodesResponseProto) GetClusterNodesResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetClusterNodesResponseProto.newBuilder() to construct. 
private GetClusterNodesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetClusterNodesResponseProto() { nodeReports_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetClusterNodesResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { nodeReports_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } nodeReports_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { nodeReports_ = java.util.Collections.unmodifiableList(nodeReports_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto.Builder.class); } public static final int NODEREPORTS_FIELD_NUMBER = 1; private java.util.List nodeReports_; /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public java.util.List getNodeReportsList() { return nodeReports_; } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public java.util.List getNodeReportsOrBuilderList() { return nodeReports_; } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public int getNodeReportsCount() { return nodeReports_.size(); } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getNodeReports(int index) { return nodeReports_.get(index); } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder getNodeReportsOrBuilder( int index) { return nodeReports_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getNodeReportsCount(); i++) { if (!getNodeReports(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws 
java.io.IOException { for (int i = 0; i < nodeReports_.size(); i++) { output.writeMessage(1, nodeReports_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < nodeReports_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, nodeReports_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto) obj; if (!getNodeReportsList() .equals(other.getNodeReportsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getNodeReportsCount() > 0) { hash = (37 * hash) + NODEREPORTS_FIELD_NUMBER; hash = (53 * hash) + getNodeReportsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetClusterNodesResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetClusterNodesResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getNodeReportsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (nodeReportsBuilder_ == null) { nodeReports_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { 
nodeReportsBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodesResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto(this); int from_bitField0_ = bitField0_; if (nodeReportsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { nodeReports_ = java.util.Collections.unmodifiableList(nodeReports_); bitField0_ = (bitField0_ & ~0x00000001); } result.nodeReports_ = nodeReports_; } else { result.nodeReports_ = nodeReportsBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor 
oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto.getDefaultInstance()) return this; if (nodeReportsBuilder_ == null) { if (!other.nodeReports_.isEmpty()) { if (nodeReports_.isEmpty()) { nodeReports_ = other.nodeReports_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureNodeReportsIsMutable(); nodeReports_.addAll(other.nodeReports_); } onChanged(); } } else { if (!other.nodeReports_.isEmpty()) { if (nodeReportsBuilder_.isEmpty()) { nodeReportsBuilder_.dispose(); nodeReportsBuilder_ = null; nodeReports_ = other.nodeReports_; bitField0_ = (bitField0_ & ~0x00000001); nodeReportsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getNodeReportsFieldBuilder() : null; } else { nodeReportsBuilder_.addAllMessages(other.nodeReports_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getNodeReportsCount(); i++) { if (!getNodeReports(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List nodeReports_ = java.util.Collections.emptyList(); private void ensureNodeReportsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { nodeReports_ = new java.util.ArrayList(nodeReports_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder> nodeReportsBuilder_; /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public java.util.List getNodeReportsList() { if (nodeReportsBuilder_ == null) { return java.util.Collections.unmodifiableList(nodeReports_); } else { return nodeReportsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public int getNodeReportsCount() { if 
(nodeReportsBuilder_ == null) { return nodeReports_.size(); } else { return nodeReportsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getNodeReports(int index) { if (nodeReportsBuilder_ == null) { return nodeReports_.get(index); } else { return nodeReportsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public Builder setNodeReports( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto value) { if (nodeReportsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeReportsIsMutable(); nodeReports_.set(index, value); onChanged(); } else { nodeReportsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public Builder setNodeReports( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder builderForValue) { if (nodeReportsBuilder_ == null) { ensureNodeReportsIsMutable(); nodeReports_.set(index, builderForValue.build()); onChanged(); } else { nodeReportsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public Builder addNodeReports(org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto value) { if (nodeReportsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeReportsIsMutable(); nodeReports_.add(value); onChanged(); } else { nodeReportsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public Builder addNodeReports( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto value) { if (nodeReportsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeReportsIsMutable(); nodeReports_.add(index, value); onChanged(); } else { nodeReportsBuilder_.addMessage(index, value); } return 
this; } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public Builder addNodeReports( org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder builderForValue) { if (nodeReportsBuilder_ == null) { ensureNodeReportsIsMutable(); nodeReports_.add(builderForValue.build()); onChanged(); } else { nodeReportsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public Builder addNodeReports( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder builderForValue) { if (nodeReportsBuilder_ == null) { ensureNodeReportsIsMutable(); nodeReports_.add(index, builderForValue.build()); onChanged(); } else { nodeReportsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public Builder addAllNodeReports( java.lang.Iterable values) { if (nodeReportsBuilder_ == null) { ensureNodeReportsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, nodeReports_); onChanged(); } else { nodeReportsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public Builder clearNodeReports() { if (nodeReportsBuilder_ == null) { nodeReports_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { nodeReportsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public Builder removeNodeReports(int index) { if (nodeReportsBuilder_ == null) { ensureNodeReportsIsMutable(); nodeReports_.remove(index); onChanged(); } else { nodeReportsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder getNodeReportsBuilder( int index) { return getNodeReportsFieldBuilder().getBuilder(index); } /** * repeated 
.hadoop.yarn.NodeReportProto nodeReports = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder getNodeReportsOrBuilder( int index) { if (nodeReportsBuilder_ == null) { return nodeReports_.get(index); } else { return nodeReportsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public java.util.List getNodeReportsOrBuilderList() { if (nodeReportsBuilder_ != null) { return nodeReportsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(nodeReports_); } } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder addNodeReportsBuilder() { return getNodeReportsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder addNodeReportsBuilder( int index) { return getNodeReportsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeReportProto nodeReports = 1; */ public java.util.List getNodeReportsBuilderList() { return getNodeReportsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder> getNodeReportsFieldBuilder() { if (nodeReportsBuilder_ == null) { nodeReportsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder>( nodeReports_, ((bitField0_ & 0x00000001) != 0), 
getParentForChildren(), isClean()); nodeReports_ = null; } return nodeReportsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetClusterNodesResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetClusterNodesResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetClusterNodesResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetClusterNodesResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetQueueInfoRequestProtoOrBuilder extends // 
@@protoc_insertion_point(interface_extends:hadoop.yarn.GetQueueInfoRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional string queueName = 1; */ boolean hasQueueName(); /** * optional string queueName = 1; */ java.lang.String getQueueName(); /** * optional string queueName = 1; */ org.apache.hadoop.thirdparty.protobuf.ByteString getQueueNameBytes(); /** * optional bool includeApplications = 2; */ boolean hasIncludeApplications(); /** * optional bool includeApplications = 2; */ boolean getIncludeApplications(); /** * optional bool includeChildQueues = 3; */ boolean hasIncludeChildQueues(); /** * optional bool includeChildQueues = 3; */ boolean getIncludeChildQueues(); /** * optional bool recursive = 4; */ boolean hasRecursive(); /** * optional bool recursive = 4; */ boolean getRecursive(); } /** * Protobuf type {@code hadoop.yarn.GetQueueInfoRequestProto} */ public static final class GetQueueInfoRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetQueueInfoRequestProto) GetQueueInfoRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetQueueInfoRequestProto.newBuilder() to construct. 
private GetQueueInfoRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetQueueInfoRequestProto() { queueName_ = ""; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetQueueInfoRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; queueName_ = bs; break; } case 16: { bitField0_ |= 0x00000002; includeApplications_ = input.readBool(); break; } case 24: { bitField0_ |= 0x00000004; includeChildQueues_ = input.readBool(); break; } case 32: { bitField0_ |= 0x00000008; recursive_ = input.readBool(); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto.Builder.class); } private int bitField0_; public static final int QUEUENAME_FIELD_NUMBER = 1; private volatile java.lang.Object queueName_; /** * optional string queueName = 1; */ public boolean hasQueueName() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string queueName = 1; */ public java.lang.String getQueueName() { java.lang.Object ref = queueName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { queueName_ = s; } return s; } } /** * optional string queueName = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getQueueNameBytes() { java.lang.Object ref = queueName_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); queueName_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int INCLUDEAPPLICATIONS_FIELD_NUMBER = 2; private boolean includeApplications_; /** * optional bool includeApplications = 2; */ public boolean hasIncludeApplications() { return ((bitField0_ & 0x00000002) != 0); } /** * optional bool includeApplications = 2; */ public boolean getIncludeApplications() { return 
includeApplications_; } public static final int INCLUDECHILDQUEUES_FIELD_NUMBER = 3; private boolean includeChildQueues_; /** * optional bool includeChildQueues = 3; */ public boolean hasIncludeChildQueues() { return ((bitField0_ & 0x00000004) != 0); } /** * optional bool includeChildQueues = 3; */ public boolean getIncludeChildQueues() { return includeChildQueues_; } public static final int RECURSIVE_FIELD_NUMBER = 4; private boolean recursive_; /** * optional bool recursive = 4; */ public boolean hasRecursive() { return ((bitField0_ & 0x00000008) != 0); } /** * optional bool recursive = 4; */ public boolean getRecursive() { return recursive_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, queueName_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeBool(2, includeApplications_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeBool(3, includeChildQueues_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeBool(4, recursive_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, queueName_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(2, includeApplications_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(3, 
includeChildQueues_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(4, recursive_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto) obj; if (hasQueueName() != other.hasQueueName()) return false; if (hasQueueName()) { if (!getQueueName() .equals(other.getQueueName())) return false; } if (hasIncludeApplications() != other.hasIncludeApplications()) return false; if (hasIncludeApplications()) { if (getIncludeApplications() != other.getIncludeApplications()) return false; } if (hasIncludeChildQueues() != other.hasIncludeChildQueues()) return false; if (hasIncludeChildQueues()) { if (getIncludeChildQueues() != other.getIncludeChildQueues()) return false; } if (hasRecursive() != other.hasRecursive()) return false; if (hasRecursive()) { if (getRecursive() != other.getRecursive()) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasQueueName()) { hash = (37 * hash) + QUEUENAME_FIELD_NUMBER; hash = (53 * hash) + getQueueName().hashCode(); } if (hasIncludeApplications()) { hash = (37 * hash) + INCLUDEAPPLICATIONS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getIncludeApplications()); } if (hasIncludeChildQueues()) { hash = (37 * hash) + INCLUDECHILDQUEUES_FIELD_NUMBER; hash = (53 * hash) + 
org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getIncludeChildQueues()); } if (hasRecursive()) { hash = (37 * hash) + RECURSIVE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getRecursive()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetQueueInfoRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetQueueInfoRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); queueName_ = ""; bitField0_ = (bitField0_ & ~0x00000001); includeApplications_ = false; bitField0_ = (bitField0_ & ~0x00000002); includeChildQueues_ = false; bitField0_ = (bitField0_ & ~0x00000004); recursive_ = false; bitField0_ = (bitField0_ & ~0x00000008); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.queueName_ = queueName_; if (((from_bitField0_ & 0x00000002) != 0)) { result.includeApplications_ = includeApplications_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { 
result.includeChildQueues_ = includeChildQueues_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.recursive_ = recursive_; to_bitField0_ |= 0x00000008; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto.getDefaultInstance()) return this; if (other.hasQueueName()) { bitField0_ |= 0x00000001; queueName_ = other.queueName_; onChanged(); } if (other.hasIncludeApplications()) { setIncludeApplications(other.getIncludeApplications()); } 
if (other.hasIncludeChildQueues()) { setIncludeChildQueues(other.getIncludeChildQueues()); } if (other.hasRecursive()) { setRecursive(other.getRecursive()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object queueName_ = ""; /** * optional string queueName = 1; */ public boolean hasQueueName() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string queueName = 1; */ public java.lang.String getQueueName() { java.lang.Object ref = queueName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { queueName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string queueName = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getQueueNameBytes() { java.lang.Object ref = queueName_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); queueName_ = b; return b; } else { return 
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string queueName = 1; */ public Builder setQueueName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; queueName_ = value; onChanged(); return this; } /** * optional string queueName = 1; */ public Builder clearQueueName() { bitField0_ = (bitField0_ & ~0x00000001); queueName_ = getDefaultInstance().getQueueName(); onChanged(); return this; } /** * optional string queueName = 1; */ public Builder setQueueNameBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; queueName_ = value; onChanged(); return this; } private boolean includeApplications_ ; /** * optional bool includeApplications = 2; */ public boolean hasIncludeApplications() { return ((bitField0_ & 0x00000002) != 0); } /** * optional bool includeApplications = 2; */ public boolean getIncludeApplications() { return includeApplications_; } /** * optional bool includeApplications = 2; */ public Builder setIncludeApplications(boolean value) { bitField0_ |= 0x00000002; includeApplications_ = value; onChanged(); return this; } /** * optional bool includeApplications = 2; */ public Builder clearIncludeApplications() { bitField0_ = (bitField0_ & ~0x00000002); includeApplications_ = false; onChanged(); return this; } private boolean includeChildQueues_ ; /** * optional bool includeChildQueues = 3; */ public boolean hasIncludeChildQueues() { return ((bitField0_ & 0x00000004) != 0); } /** * optional bool includeChildQueues = 3; */ public boolean getIncludeChildQueues() { return includeChildQueues_; } /** * optional bool includeChildQueues = 3; */ public Builder setIncludeChildQueues(boolean value) { bitField0_ |= 0x00000004; includeChildQueues_ = value; onChanged(); return this; } /** * optional bool includeChildQueues = 3; */ public Builder clearIncludeChildQueues() { bitField0_ = (bitField0_ 
& ~0x00000004); includeChildQueues_ = false; onChanged(); return this; } private boolean recursive_ ; /** * optional bool recursive = 4; */ public boolean hasRecursive() { return ((bitField0_ & 0x00000008) != 0); } /** * optional bool recursive = 4; */ public boolean getRecursive() { return recursive_; } /** * optional bool recursive = 4; */ public Builder setRecursive(boolean value) { bitField0_ |= 0x00000008; recursive_ = value; onChanged(); return this; } /** * optional bool recursive = 4; */ public Builder clearRecursive() { bitField0_ = (bitField0_ & ~0x00000008); recursive_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetQueueInfoRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetQueueInfoRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetQueueInfoRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new 
GetQueueInfoRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetQueueInfoResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetQueueInfoResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.QueueInfoProto queueInfo = 1; */ boolean hasQueueInfo(); /** * optional .hadoop.yarn.QueueInfoProto queueInfo = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getQueueInfo(); /** * optional .hadoop.yarn.QueueInfoProto queueInfo = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder getQueueInfoOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.GetQueueInfoResponseProto} */ public static final class GetQueueInfoResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetQueueInfoResponseProto) GetQueueInfoResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetQueueInfoResponseProto.newBuilder() to construct. 
private GetQueueInfoResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetQueueInfoResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetQueueInfoResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = queueInfo_.toBuilder(); } queueInfo_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(queueInfo_); queueInfo_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto.Builder.class); } private int bitField0_; public static final int QUEUEINFO_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto queueInfo_; /** * optional .hadoop.yarn.QueueInfoProto queueInfo = 1; */ public boolean hasQueueInfo() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.QueueInfoProto queueInfo = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getQueueInfo() { return queueInfo_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance() : queueInfo_; } /** * optional .hadoop.yarn.QueueInfoProto queueInfo = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder getQueueInfoOrBuilder() { return queueInfo_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance() : queueInfo_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasQueueInfo()) { if (!getQueueInfo().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getQueueInfo()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getQueueInfo()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto) obj; if (hasQueueInfo() != other.hasQueueInfo()) return false; if (hasQueueInfo()) { if (!getQueueInfo() .equals(other.getQueueInfo())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasQueueInfo()) { hash = (37 * hash) + QUEUEINFO_FIELD_NUMBER; hash = (53 * hash) + getQueueInfo().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); 
memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto 
prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetQueueInfoResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetQueueInfoResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getQueueInfoFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if 
(queueInfoBuilder_ == null) { queueInfo_ = null; } else { queueInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueInfoResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (queueInfoBuilder_ == null) { result.queueInfo_ = queueInfo_; } else { result.queueInfo_ = queueInfoBuilder_.build(); } to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto.getDefaultInstance()) return this; if (other.hasQueueInfo()) { mergeQueueInfo(other.getQueueInfo()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasQueueInfo()) { if (!getQueueInfo().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto) e.getUnfinishedMessage(); throw 
e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto queueInfo_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder> queueInfoBuilder_; /** * optional .hadoop.yarn.QueueInfoProto queueInfo = 1; */ public boolean hasQueueInfo() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.QueueInfoProto queueInfo = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getQueueInfo() { if (queueInfoBuilder_ == null) { return queueInfo_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance() : queueInfo_; } else { return queueInfoBuilder_.getMessage(); } } /** * optional .hadoop.yarn.QueueInfoProto queueInfo = 1; */ public Builder setQueueInfo(org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto value) { if (queueInfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } queueInfo_ = value; onChanged(); } else { queueInfoBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.QueueInfoProto queueInfo = 1; */ public Builder setQueueInfo( org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder builderForValue) { if (queueInfoBuilder_ == null) { queueInfo_ = builderForValue.build(); onChanged(); } else { queueInfoBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.QueueInfoProto queueInfo = 1; */ public Builder mergeQueueInfo(org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto value) { if (queueInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && queueInfo_ != null && queueInfo_ != 
org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance()) { queueInfo_ = org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.newBuilder(queueInfo_).mergeFrom(value).buildPartial(); } else { queueInfo_ = value; } onChanged(); } else { queueInfoBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.QueueInfoProto queueInfo = 1; */ public Builder clearQueueInfo() { if (queueInfoBuilder_ == null) { queueInfo_ = null; onChanged(); } else { queueInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.QueueInfoProto queueInfo = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder getQueueInfoBuilder() { bitField0_ |= 0x00000001; onChanged(); return getQueueInfoFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.QueueInfoProto queueInfo = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder getQueueInfoOrBuilder() { if (queueInfoBuilder_ != null) { return queueInfoBuilder_.getMessageOrBuilder(); } else { return queueInfo_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance() : queueInfo_; } } /** * optional .hadoop.yarn.QueueInfoProto queueInfo = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder> getQueueInfoFieldBuilder() { if (queueInfoBuilder_ == null) { queueInfoBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder>( getQueueInfo(), getParentForChildren(), isClean()); queueInfo_ = null; } return queueInfoBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetQueueInfoResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetQueueInfoResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetQueueInfoResponseProto parsePartialFrom( 
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetQueueInfoResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetQueueUserAclsInfoRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetQueueUserAclsInfoRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.GetQueueUserAclsInfoRequestProto} */ public static final class GetQueueUserAclsInfoRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetQueueUserAclsInfoRequestProto) GetQueueUserAclsInfoRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetQueueUserAclsInfoRequestProto.newBuilder() to construct. 
private GetQueueUserAclsInfoRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetQueueUserAclsInfoRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetQueueUserAclsInfoRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite 
extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetQueueUserAclsInfoRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetQueueUserAclsInfoRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); 
} @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetQueueUserAclsInfoRequestProto) } // 
@@protoc_insertion_point(class_scope:hadoop.yarn.GetQueueUserAclsInfoRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetQueueUserAclsInfoRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetQueueUserAclsInfoRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetQueueUserAclsInfoResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetQueueUserAclsInfoResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1; */ java.util.List getQueueUserAclsList(); /** * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto getQueueUserAcls(int index); /** * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1; */ int getQueueUserAclsCount(); /** * repeated .hadoop.yarn.QueueUserACLInfoProto 
queueUserAcls = 1; */ java.util.List getQueueUserAclsOrBuilderList(); /** * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProtoOrBuilder getQueueUserAclsOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.GetQueueUserAclsInfoResponseProto} */ public static final class GetQueueUserAclsInfoResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetQueueUserAclsInfoResponseProto) GetQueueUserAclsInfoResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetQueueUserAclsInfoResponseProto.newBuilder() to construct. private GetQueueUserAclsInfoResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetQueueUserAclsInfoResponseProto() { queueUserAcls_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetQueueUserAclsInfoResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { queueUserAcls_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } queueUserAcls_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.PARSER, 
extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { queueUserAcls_ = java.util.Collections.unmodifiableList(queueUserAcls_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto.Builder.class); } public static final int QUEUEUSERACLS_FIELD_NUMBER = 1; private java.util.List queueUserAcls_; /** * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1; */ public java.util.List getQueueUserAclsList() { return queueUserAcls_; } /** * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1; */ public java.util.List getQueueUserAclsOrBuilderList() { return queueUserAcls_; } /** * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1; */ public int getQueueUserAclsCount() { return queueUserAcls_.size(); } /** * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto getQueueUserAcls(int index) { return queueUserAcls_.get(index); } /** * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProtoOrBuilder getQueueUserAclsOrBuilder( int index) { return queueUserAcls_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < queueUserAcls_.size(); i++) { output.writeMessage(1, queueUserAcls_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < queueUserAcls_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, queueUserAcls_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto) obj; if (!getQueueUserAclsList() .equals(other.getQueueUserAclsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if 
(getQueueUserAclsCount() > 0) { hash = (37 * hash) + QUEUEUSERACLS_FIELD_NUMBER; hash = (53 * hash) + getQueueUserAclsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, 
extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetQueueUserAclsInfoResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetQueueUserAclsInfoResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getQueueUserAclsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (queueUserAclsBuilder_ == null) { queueUserAcls_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { queueUserAclsBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto(this); int from_bitField0_ = bitField0_; if (queueUserAclsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { queueUserAcls_ = java.util.Collections.unmodifiableList(queueUserAcls_); bitField0_ = (bitField0_ & ~0x00000001); } result.queueUserAcls_ = queueUserAcls_; } else { result.queueUserAcls_ = 
queueUserAclsBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto.getDefaultInstance()) return this; if (queueUserAclsBuilder_ == null) { if (!other.queueUserAcls_.isEmpty()) { if (queueUserAcls_.isEmpty()) { queueUserAcls_ = other.queueUserAcls_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureQueueUserAclsIsMutable(); queueUserAcls_.addAll(other.queueUserAcls_); } onChanged(); } } else { if (!other.queueUserAcls_.isEmpty()) { if 
// NOTE(review): protoc-generated code ("DO NOT EDIT" per the file header).
// The comments below are review annotations only; no code tokens were changed.
// Do not hand-edit this logic -- regenerate from yarn_service_protos.proto.
// NOTE(review): generic type arguments (e.g. List<QueueUserACLInfoProto>)
// appear stripped to raw types in this copy -- presumably an HTML-extraction
// artifact; TODO confirm against the original generated file before building.

// --- continuation of GetQueueUserAclsInfoResponseProto.Builder.mergeFrom(other):
// builder-backed branch of the repeated-field merge. If this builder's field
// builder is empty it is disposed and the other message's (immutable) list is
// adopted directly; otherwise the other list is appended through the builder.
          (queueUserAclsBuilder_.isEmpty()) {
            queueUserAclsBuilder_.dispose();
            queueUserAclsBuilder_ = null;
            queueUserAcls_ = other.queueUserAcls_;
            bitField0_ = (bitField0_ & ~0x00000001);
            // Recreate the field builder only when the runtime forces eager builders.
            queueUserAclsBuilder_ =
              org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                 getQueueUserAclsFieldBuilder() : null;
          } else {
            queueUserAclsBuilder_.addAllMessages(other.queueUserAcls_);
          }
        }
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      // Message has no required fields, so any instance is initialized.
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever was parsed before a failure so partial data is kept.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    private java.util.List queueUserAcls_ =
      java.util.Collections.emptyList();
    // Copy-on-write guard: the list starts as the shared immutable emptyList
    // and is replaced by a private ArrayList before the first mutation.
    private void ensureQueueUserAclsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        queueUserAcls_ = new java.util.ArrayList(queueUserAcls_);
        bitField0_ |= 0x00000001;
      }
    }

    private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProtoOrBuilder> queueUserAclsBuilder_;

    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public java.util.List getQueueUserAclsList() {
      if (queueUserAclsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(queueUserAcls_);
      } else {
        return queueUserAclsBuilder_.getMessageList();
      }
    }
    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public int getQueueUserAclsCount() {
      if (queueUserAclsBuilder_ == null) {
        return queueUserAcls_.size();
      } else {
        return queueUserAclsBuilder_.getCount();
      }
    }
    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto getQueueUserAcls(int index) {
      if (queueUserAclsBuilder_ == null) {
        return queueUserAcls_.get(index);
      } else {
        return queueUserAclsBuilder_.getMessage(index);
      }
    }
    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public Builder setQueueUserAcls(
        int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto value) {
      if (queueUserAclsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureQueueUserAclsIsMutable();
        queueUserAcls_.set(index, value);
        onChanged();
      } else {
        queueUserAclsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public Builder setQueueUserAcls(
        int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder builderForValue) {
      if (queueUserAclsBuilder_ == null) {
        ensureQueueUserAclsIsMutable();
        queueUserAcls_.set(index, builderForValue.build());
        onChanged();
      } else {
        queueUserAclsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public Builder addQueueUserAcls(org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto value) {
      if (queueUserAclsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureQueueUserAclsIsMutable();
        queueUserAcls_.add(value);
        onChanged();
      } else {
        queueUserAclsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public Builder addQueueUserAcls(
        int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto value) {
      if (queueUserAclsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureQueueUserAclsIsMutable();
        queueUserAcls_.add(index, value);
        onChanged();
      } else {
        queueUserAclsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public Builder addQueueUserAcls(
        org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder builderForValue) {
      if (queueUserAclsBuilder_ == null) {
        ensureQueueUserAclsIsMutable();
        queueUserAcls_.add(builderForValue.build());
        onChanged();
      } else {
        queueUserAclsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public Builder addQueueUserAcls(
        int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder builderForValue) {
      if (queueUserAclsBuilder_ == null) {
        ensureQueueUserAclsIsMutable();
        queueUserAcls_.add(index, builderForValue.build());
        onChanged();
      } else {
        queueUserAclsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public Builder addAllQueueUserAcls(
        java.lang.Iterable values) {
      if (queueUserAclsBuilder_ == null) {
        ensureQueueUserAclsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, queueUserAcls_);
        onChanged();
      } else {
        queueUserAclsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public Builder clearQueueUserAcls() {
      if (queueUserAclsBuilder_ == null) {
        nodeToLabelsClearComment: ;
        queueUserAcls_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        queueUserAclsBuilder_.clear();
      }
      return this;
    }
    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public Builder removeQueueUserAcls(int index) {
      if (queueUserAclsBuilder_ == null) {
        ensureQueueUserAclsIsMutable();
        queueUserAcls_.remove(index);
        onChanged();
      } else {
        queueUserAclsBuilder_.remove(index);
      }
      return this;
    }
    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder getQueueUserAclsBuilder(
        int index) {
      return getQueueUserAclsFieldBuilder().getBuilder(index);
    }
    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProtoOrBuilder getQueueUserAclsOrBuilder(
        int index) {
      if (queueUserAclsBuilder_ == null) {
        return queueUserAcls_.get(index);
      } else {
        return queueUserAclsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public java.util.List getQueueUserAclsOrBuilderList() {
      if (queueUserAclsBuilder_ != null) {
        return queueUserAclsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(queueUserAcls_);
      }
    }
    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder addQueueUserAclsBuilder() {
      return getQueueUserAclsFieldBuilder().addBuilder(
          org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.getDefaultInstance());
    }
    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder addQueueUserAclsBuilder(
        int index) {
      return getQueueUserAclsFieldBuilder().addBuilder(
          index, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.getDefaultInstance());
    }
    /**
     * repeated .hadoop.yarn.QueueUserACLInfoProto queueUserAcls = 1;
     */
    public java.util.List getQueueUserAclsBuilderList() {
      return getQueueUserAclsFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3; once created, queueUserAcls_
    // is handed to the builder and nulled out here (single owner of the data).
    private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProtoOrBuilder>
        getQueueUserAclsFieldBuilder() {
      if (queueUserAclsBuilder_ == null) {
        queueUserAclsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
            org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProtoOrBuilder>(
                queueUserAcls_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        queueUserAcls_ = null;
      }
      return queueUserAclsBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetQueueUserAclsInfoResponseProto)
  }

  // @@protoc_insertion_point(class_scope:hadoop.yarn.GetQueueUserAclsInfoResponseProto)
  private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto();
  }

  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER =
      new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
    @java.lang.Override
    public GetQueueUserAclsInfoResponseProto parsePartialFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return new GetQueueUserAclsInfoResponseProto(input, extensionRegistry);
    }
  };

  public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}

// Marker interface for the field-less GetNodesToLabelsRequestProto message.
public interface GetNodesToLabelsRequestProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetNodesToLabelsRequestProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
 * Protobuf type {@code hadoop.yarn.GetNodesToLabelsRequestProto}
 */
public static final class GetNodesToLabelsRequestProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.GetNodesToLabelsRequestProto)
    GetNodesToLabelsRequestProtoOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use GetNodesToLabelsRequestProto.newBuilder() to construct.
  private GetNodesToLabelsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) {
    super(builder);
  }
  private GetNodesToLabelsRequestProto() {
  }

  @java.lang.Override
  public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-parsing constructor: the message declares no fields, so every tag is
  // either end-of-stream (0) or preserved in the unknown-field set.
  private GetNodesToLabelsRequestProto(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
        org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto.Builder.class);
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto)) {
      return super.equals(obj);
    }
    org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto) obj;

    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(
      java.nio.ByteBuffer data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(
      java.nio.ByteBuffer data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(byte[] data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(
      byte[] data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetNodesToLabelsRequestProto}
   */
  public static final class Builder extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
      // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetNodesToLabelsRequestProto)
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProtoOrBuilder {
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto.Builder.class);
    }

    // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      return this;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_descriptor;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto getDefaultInstanceForType() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto.getDefaultInstance();
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto build() {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto buildPartial() {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto(this);
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto) {
        return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto other) {
      if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto.getDefaultInstance()) return this;
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetNodesToLabelsRequestProto)
  }

  // @@protoc_insertion_point(class_scope:hadoop.yarn.GetNodesToLabelsRequestProto)
  private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto();
  }

  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER =
      new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
    @java.lang.Override
    public GetNodesToLabelsRequestProto parsePartialFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return new GetNodesToLabelsRequestProto(input, extensionRegistry);
    }
  };

  public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}

// Accessor contract for GetNodesToLabelsResponseProto's single repeated field
// "nodeToLabels" (field number 1).
public interface GetNodesToLabelsResponseProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetNodesToLabelsResponseProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /**
   * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
   */
  java.util.List
      getNodeToLabelsList();
  /**
   * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto getNodeToLabels(int index);
  /**
   * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
   */
  int getNodeToLabelsCount();
  /**
   * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
   */
  java.util.List
      getNodeToLabelsOrBuilderList();
  /**
   * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder getNodeToLabelsOrBuilder(
      int index);
}
/**
 * Protobuf type {@code hadoop.yarn.GetNodesToLabelsResponseProto}
 */
public static final class GetNodesToLabelsResponseProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.GetNodesToLabelsResponseProto)
    GetNodesToLabelsResponseProtoOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use GetNodesToLabelsResponseProto.newBuilder() to construct.
  private GetNodesToLabelsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) {
    super(builder);
  }
  private GetNodesToLabelsResponseProto() {
    nodeToLabels_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-parsing constructor: tag 10 (field 1, wire type 2) accumulates
  // NodeIdToLabelsProto messages; everything else goes to unknown fields.
  private GetNodesToLabelsResponseProto(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
        org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: {
            if (!((mutable_bitField0_ & 0x00000001) != 0)) {
              nodeToLabels_ = new java.util.ArrayList();
              mutable_bitField0_ |= 0x00000001;
            }
            nodeToLabels_.add(
                input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.PARSER, extensionRegistry));
            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      // Seal the accumulated list even when parsing aborts mid-stream.
      if (((mutable_bitField0_ & 0x00000001) != 0)) {
        nodeToLabels_ = java.util.Collections.unmodifiableList(nodeToLabels_);
      }
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto.Builder.class);
  }

  public static final int NODETOLABELS_FIELD_NUMBER = 1;
  private java.util.List nodeToLabels_;
  /**
   * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
   */
  public java.util.List getNodeToLabelsList() {
    return nodeToLabels_;
  }
  /**
   * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
   */
  public java.util.List
      getNodeToLabelsOrBuilderList() {
    return nodeToLabels_;
  }
  /**
   * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
   */
  public int getNodeToLabelsCount() {
    return nodeToLabels_.size();
  }
  /**
   * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
   */
  public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto getNodeToLabels(int index) {
    return nodeToLabels_.get(index);
  }
  /**
   * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1;
   */
  public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder getNodeToLabelsOrBuilder(
      int index) {
    return nodeToLabels_.get(index);
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    for (int i = 0; i < nodeToLabels_.size(); i++) {
      output.writeMessage(1, nodeToLabels_.get(i));
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < nodeToLabels_.size(); i++) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeMessageSize(1, nodeToLabels_.get(i));
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto)) {
      return super.equals(obj);
    }
    org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto) obj;

    if (!getNodeToLabelsList()
        .equals(other.getNodeToLabelsList())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getNodeToLabelsCount() > 0) {
      hash = (37 * hash) + NODETOLABELS_FIELD_NUMBER;
      hash = (53 * hash) + getNodeToLabelsList().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(
      java.nio.ByteBuffer data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(
      java.nio.ByteBuffer data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(byte[] data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(
      byte[] data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetNodesToLabelsResponseProto}
   */
  public static final class Builder extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
      // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetNodesToLabelsResponseProto)
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProtoOrBuilder {
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto.Builder.class);
    }

    // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
        getNodeToLabelsFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      if (nodeToLabelsBuilder_ == null) {
        nodeToLabels_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        nodeToLabelsBuilder_.clear();
      }
      return this;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_descriptor;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto getDefaultInstanceForType() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto.getDefaultInstance();
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto build() {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto buildPartial() {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto(this);
      int from_bitField0_ = bitField0_;
      if (nodeToLabelsBuilder_ == null) {
        // List-backed path: freeze the list and hand ownership to the message.
        if (((bitField0_ & 0x00000001) != 0)) {
          nodeToLabels_ = java.util.Collections.unmodifiableList(nodeToLabels_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.nodeToLabels_ = nodeToLabels_;
      } else {
        result.nodeToLabels_ = nodeToLabelsBuilder_.build();
      }
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto.getDefaultInstance()) return this; if (nodeToLabelsBuilder_ == null) { if (!other.nodeToLabels_.isEmpty()) { if (nodeToLabels_.isEmpty()) { nodeToLabels_ = other.nodeToLabels_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureNodeToLabelsIsMutable(); nodeToLabels_.addAll(other.nodeToLabels_); } onChanged(); } } else { if (!other.nodeToLabels_.isEmpty()) { if (nodeToLabelsBuilder_.isEmpty()) { nodeToLabelsBuilder_.dispose(); nodeToLabelsBuilder_ = null; nodeToLabels_ = other.nodeToLabels_; bitField0_ = (bitField0_ & ~0x00000001); nodeToLabelsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getNodeToLabelsFieldBuilder() : null; } else { nodeToLabelsBuilder_.addAllMessages(other.nodeToLabels_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List nodeToLabels_ = java.util.Collections.emptyList(); private void ensureNodeToLabelsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { nodeToLabels_ = new java.util.ArrayList(nodeToLabels_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder> nodeToLabelsBuilder_; /** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public java.util.List getNodeToLabelsList() { if (nodeToLabelsBuilder_ == null) { return java.util.Collections.unmodifiableList(nodeToLabels_); } else { return nodeToLabelsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public int getNodeToLabelsCount() { if (nodeToLabelsBuilder_ == null) { return nodeToLabels_.size(); } else { return 
nodeToLabelsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto getNodeToLabels(int index) { if (nodeToLabelsBuilder_ == null) { return nodeToLabels_.get(index); } else { return nodeToLabelsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public Builder setNodeToLabels( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto value) { if (nodeToLabelsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeToLabelsIsMutable(); nodeToLabels_.set(index, value); onChanged(); } else { nodeToLabelsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public Builder setNodeToLabels( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder builderForValue) { if (nodeToLabelsBuilder_ == null) { ensureNodeToLabelsIsMutable(); nodeToLabels_.set(index, builderForValue.build()); onChanged(); } else { nodeToLabelsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public Builder addNodeToLabels(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto value) { if (nodeToLabelsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeToLabelsIsMutable(); nodeToLabels_.add(value); onChanged(); } else { nodeToLabelsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public Builder addNodeToLabels( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto value) { if (nodeToLabelsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeToLabelsIsMutable(); nodeToLabels_.add(index, value); onChanged(); } else { nodeToLabelsBuilder_.addMessage(index, value); } return this; } 
/** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public Builder addNodeToLabels( org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder builderForValue) { if (nodeToLabelsBuilder_ == null) { ensureNodeToLabelsIsMutable(); nodeToLabels_.add(builderForValue.build()); onChanged(); } else { nodeToLabelsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public Builder addNodeToLabels( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder builderForValue) { if (nodeToLabelsBuilder_ == null) { ensureNodeToLabelsIsMutable(); nodeToLabels_.add(index, builderForValue.build()); onChanged(); } else { nodeToLabelsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public Builder addAllNodeToLabels( java.lang.Iterable values) { if (nodeToLabelsBuilder_ == null) { ensureNodeToLabelsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, nodeToLabels_); onChanged(); } else { nodeToLabelsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public Builder clearNodeToLabels() { if (nodeToLabelsBuilder_ == null) { nodeToLabels_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { nodeToLabelsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public Builder removeNodeToLabels(int index) { if (nodeToLabelsBuilder_ == null) { ensureNodeToLabelsIsMutable(); nodeToLabels_.remove(index); onChanged(); } else { nodeToLabelsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder getNodeToLabelsBuilder( int index) { return 
getNodeToLabelsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder getNodeToLabelsOrBuilder( int index) { if (nodeToLabelsBuilder_ == null) { return nodeToLabels_.get(index); } else { return nodeToLabelsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public java.util.List getNodeToLabelsOrBuilderList() { if (nodeToLabelsBuilder_ != null) { return nodeToLabelsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(nodeToLabels_); } } /** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder addNodeToLabelsBuilder() { return getNodeToLabelsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder addNodeToLabelsBuilder( int index) { return getNodeToLabelsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeIdToLabelsProto nodeToLabels = 1; */ public java.util.List getNodeToLabelsBuilderList() { return getNodeToLabelsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder> getNodeToLabelsFieldBuilder() { if (nodeToLabelsBuilder_ == null) { nodeToLabelsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto, 
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder>( nodeToLabels_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); nodeToLabels_ = null; } return nodeToLabelsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetNodesToLabelsResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetNodesToLabelsResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetNodesToLabelsResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetNodesToLabelsResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetLabelsToNodesRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetLabelsToNodesRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated string nodeLabels = 1; */ java.util.List getNodeLabelsList(); /** * repeated string nodeLabels = 1; */ int getNodeLabelsCount(); /** * repeated string nodeLabels = 1; */ java.lang.String getNodeLabels(int index); /** * repeated string nodeLabels = 1; */ org.apache.hadoop.thirdparty.protobuf.ByteString getNodeLabelsBytes(int index); } /** * Protobuf type {@code hadoop.yarn.GetLabelsToNodesRequestProto} */ public static final class GetLabelsToNodesRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetLabelsToNodesRequestProto) GetLabelsToNodesRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetLabelsToNodesRequestProto.newBuilder() to construct. 
private GetLabelsToNodesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetLabelsToNodesRequestProto() { nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetLabelsToNodesRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000001) != 0)) { nodeLabels_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000001; } nodeLabels_.add(bs); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { nodeLabels_ = nodeLabels_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto.Builder.class); } public static final int NODELABELS_FIELD_NUMBER = 1; private org.apache.hadoop.thirdparty.protobuf.LazyStringList nodeLabels_; /** * repeated string nodeLabels = 1; */ public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getNodeLabelsList() { return nodeLabels_; } /** * repeated string nodeLabels = 1; */ public int getNodeLabelsCount() { return nodeLabels_.size(); } /** * repeated string nodeLabels = 1; */ public java.lang.String getNodeLabels(int index) { return nodeLabels_.get(index); } /** * repeated string nodeLabels = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getNodeLabelsBytes(int index) { return nodeLabels_.getByteString(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < nodeLabels_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, nodeLabels_.getRaw(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for 
(int i = 0; i < nodeLabels_.size(); i++) { dataSize += computeStringSizeNoTag(nodeLabels_.getRaw(i)); } size += dataSize; size += 1 * getNodeLabelsList().size(); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto) obj; if (!getNodeLabelsList() .equals(other.getNodeLabelsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getNodeLabelsCount() > 0) { hash = (37 * hash) + NODELABELS_FIELD_NUMBER; hash = (53 * hash) + getNodeLabelsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseDelimitedFrom( java.io.InputStream input, 
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetLabelsToNodesRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetLabelsToNodesRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public 
org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto(this); int from_bitField0_ = bitField0_; if (((bitField0_ & 0x00000001) != 0)) { nodeLabels_ = nodeLabels_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000001); } result.nodeLabels_ = nodeLabels_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, 
java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto.getDefaultInstance()) return this; if (!other.nodeLabels_.isEmpty()) { if (nodeLabels_.isEmpty()) { nodeLabels_ = other.nodeLabels_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureNodeLabelsIsMutable(); nodeLabels_.addAll(other.nodeLabels_); } onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private 
org.apache.hadoop.thirdparty.protobuf.LazyStringList nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; private void ensureNodeLabelsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { nodeLabels_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(nodeLabels_); bitField0_ |= 0x00000001; } } /** * repeated string nodeLabels = 1; */ public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getNodeLabelsList() { return nodeLabels_.getUnmodifiableView(); } /** * repeated string nodeLabels = 1; */ public int getNodeLabelsCount() { return nodeLabels_.size(); } /** * repeated string nodeLabels = 1; */ public java.lang.String getNodeLabels(int index) { return nodeLabels_.get(index); } /** * repeated string nodeLabels = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getNodeLabelsBytes(int index) { return nodeLabels_.getByteString(index); } /** * repeated string nodeLabels = 1; */ public Builder setNodeLabels( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureNodeLabelsIsMutable(); nodeLabels_.set(index, value); onChanged(); return this; } /** * repeated string nodeLabels = 1; */ public Builder addNodeLabels( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureNodeLabelsIsMutable(); nodeLabels_.add(value); onChanged(); return this; } /** * repeated string nodeLabels = 1; */ public Builder addAllNodeLabels( java.lang.Iterable values) { ensureNodeLabelsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, nodeLabels_); onChanged(); return this; } /** * repeated string nodeLabels = 1; */ public Builder clearNodeLabels() { nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * repeated string nodeLabels = 1; */ public Builder addNodeLabelsBytes( 
org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureNodeLabelsIsMutable(); nodeLabels_.add(value); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetLabelsToNodesRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetLabelsToNodesRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetLabelsToNodesRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetLabelsToNodesRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public 
// ===========================================================================
// NOTE(review): protoc-generated — do not hand-edit; regenerate from
// yarn_service_protos.proto. Comments are navigational only; code tokens are
// unchanged.
// GetLabelsToNodesResponseProto: message with a single repeated message
// field ".hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1". First the
// read-only OrBuilder interface, then the immutable message class.
// ===========================================================================
interface GetLabelsToNodesResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetLabelsToNodesResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ java.util.List getLabelsToNodesList(); /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto getLabelsToNodes(int index); /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ int getLabelsToNodesCount(); /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ java.util.List getLabelsToNodesOrBuilderList(); /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProtoOrBuilder getLabelsToNodesOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.GetLabelsToNodesResponseProto} */ public static final class GetLabelsToNodesResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetLabelsToNodesResponseProto) GetLabelsToNodesResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetLabelsToNodesResponseProto.newBuilder() to construct.
// Constructors: the no-arg form seeds labelsToNodes_ with an empty list;
// the CodedInputStream form is the parsing constructor — it loops over wire
// tags, appending a LabelsToNodeIdsProto for each field-1 record (tag 10 =
// field 1, wire type 2), and seals the list as unmodifiable in the finally
// block so the message stays immutable even after a parse failure.
private GetLabelsToNodesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetLabelsToNodesResponseProto() { labelsToNodes_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetLabelsToNodesResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { labelsToNodes_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } labelsToNodes_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { labelsToNodes_ = java.util.Collections.unmodifiableList(labelsToNodes_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return
// Descriptor / reflection wiring, then the repeated-field getters backed by
// the (immutable) labelsToNodes_ list.
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto.Builder.class); } public static final int LABELSTONODES_FIELD_NUMBER = 1; private java.util.List labelsToNodes_; /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public java.util.List getLabelsToNodesList() { return labelsToNodes_; } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public java.util.List getLabelsToNodesOrBuilderList() { return labelsToNodes_; } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public int getLabelsToNodesCount() { return labelsToNodes_.size(); } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto getLabelsToNodes(int index) { return labelsToNodes_.get(index); } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProtoOrBuilder getLabelsToNodesOrBuilder( int index) { return labelsToNodes_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < labelsToNodes_.size();
i++) { output.writeMessage(1, labelsToNodes_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < labelsToNodes_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, labelsToNodes_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto) obj; if (!getLabelsToNodesList() .equals(other.getLabelsToNodesList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getLabelsToNodesCount() > 0) { hash = (37 * hash) + LABELSTONODES_FIELD_NUMBER; hash = (53 * hash) + getLabelsToNodesList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static
// Remaining static parseFrom overloads (ByteString, byte[], InputStream,
// delimited, CodedInputStream), all delegating to PARSER /
// GeneratedMessageV3 helpers.
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ?
// toBuilder's ternary resumes; then the nested mutable Builder class. For
// the repeated message field the Builder keeps EITHER a plain list
// (labelsToNodes_, mutability tracked via bit 0 of bitField0_) OR a
// RepeatedFieldBuilderV3 (labelsToNodesBuilder_) once nested builders are
// requested — every accessor below branches on which representation is live.
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetLabelsToNodesResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetLabelsToNodesResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getLabelsToNodesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (labelsToNodesBuilder_ == null) { labelsToNodes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ &
~0x00000001); } else { labelsToNodesBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto(this); int from_bitField0_ = bitField0_; if (labelsToNodesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { labelsToNodes_ = java.util.Collections.unmodifiableList(labelsToNodes_); bitField0_ = (bitField0_ & ~0x00000001); } result.labelsToNodes_ = labelsToNodes_; } else { result.labelsToNodes_ = labelsToNodesBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto.getDefaultInstance()) return this; if (labelsToNodesBuilder_ == null) { if (!other.labelsToNodes_.isEmpty()) { if (labelsToNodes_.isEmpty()) { labelsToNodes_ = other.labelsToNodes_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureLabelsToNodesIsMutable(); labelsToNodes_.addAll(other.labelsToNodes_); } onChanged(); } } else { if (!other.labelsToNodes_.isEmpty()) { if (labelsToNodesBuilder_.isEmpty()) { labelsToNodesBuilder_.dispose(); labelsToNodesBuilder_ = null; labelsToNodes_ = other.labelsToNodes_; bitField0_ = (bitField0_ & ~0x00000001); labelsToNodesBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getLabelsToNodesFieldBuilder() : null; } else { labelsToNodesBuilder_.addAllMessages(other.labelsToNodes_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List labelsToNodes_ = java.util.Collections.emptyList(); private void ensureLabelsToNodesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { labelsToNodes_ = new java.util.ArrayList(labelsToNodes_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProtoOrBuilder> labelsToNodesBuilder_; /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public java.util.List getLabelsToNodesList() { if (labelsToNodesBuilder_ == null) { return java.util.Collections.unmodifiableList(labelsToNodes_); } else { return labelsToNodesBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public int getLabelsToNodesCount() { if (labelsToNodesBuilder_ == null) { return labelsToNodes_.size(); } else
{ return labelsToNodesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto getLabelsToNodes(int index) { if (labelsToNodesBuilder_ == null) { return labelsToNodes_.get(index); } else { return labelsToNodesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public Builder setLabelsToNodes( int index, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto value) { if (labelsToNodesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureLabelsToNodesIsMutable(); labelsToNodes_.set(index, value); onChanged(); } else { labelsToNodesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public Builder setLabelsToNodes( int index, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder builderForValue) { if (labelsToNodesBuilder_ == null) { ensureLabelsToNodesIsMutable(); labelsToNodes_.set(index, builderForValue.build()); onChanged(); } else { labelsToNodesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public Builder addLabelsToNodes(org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto value) { if (labelsToNodesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureLabelsToNodesIsMutable(); labelsToNodes_.add(value); onChanged(); } else { labelsToNodesBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public Builder addLabelsToNodes( int index, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto value) { if (labelsToNodesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureLabelsToNodesIsMutable(); labelsToNodes_.add(index, value); onChanged(); } else {
labelsToNodesBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public Builder addLabelsToNodes( org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder builderForValue) { if (labelsToNodesBuilder_ == null) { ensureLabelsToNodesIsMutable(); labelsToNodes_.add(builderForValue.build()); onChanged(); } else { labelsToNodesBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public Builder addLabelsToNodes( int index, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder builderForValue) { if (labelsToNodesBuilder_ == null) { ensureLabelsToNodesIsMutable(); labelsToNodes_.add(index, builderForValue.build()); onChanged(); } else { labelsToNodesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public Builder addAllLabelsToNodes( java.lang.Iterable values) { if (labelsToNodesBuilder_ == null) { ensureLabelsToNodesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, labelsToNodes_); onChanged(); } else { labelsToNodesBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public Builder clearLabelsToNodes() { if (labelsToNodesBuilder_ == null) { labelsToNodes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { labelsToNodesBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public Builder removeLabelsToNodes(int index) { if (labelsToNodesBuilder_ == null) { ensureLabelsToNodesIsMutable(); labelsToNodes_.remove(index); onChanged(); } else { labelsToNodesBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public
org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder getLabelsToNodesBuilder( int index) { return getLabelsToNodesFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProtoOrBuilder getLabelsToNodesOrBuilder( int index) { if (labelsToNodesBuilder_ == null) { return labelsToNodes_.get(index); } else { return labelsToNodesBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public java.util.List getLabelsToNodesOrBuilderList() { if (labelsToNodesBuilder_ != null) { return labelsToNodesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(labelsToNodes_); } } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder addLabelsToNodesBuilder() { return getLabelsToNodesFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder addLabelsToNodesBuilder( int index) { return getLabelsToNodesFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.LabelsToNodeIdsProto labelsToNodes = 1; */ public java.util.List getLabelsToNodesBuilderList() { return getLabelsToNodesFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProtoOrBuilder> getLabelsToNodesFieldBuilder() { if (labelsToNodesBuilder_ == null) { labelsToNodesBuilder_ = new
// getLabelsToNodesFieldBuilder lazily switches the Builder from the plain
// list to a RepeatedFieldBuilderV3 (nulling labelsToNodes_); then
// unknown-field plumbing closes the Builder, followed by the message's
// static DEFAULT_INSTANCE and deprecated PARSER.
org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProtoOrBuilder>( labelsToNodes_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); labelsToNodes_ = null; } return labelsToNodesBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetLabelsToNodesResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetLabelsToNodesResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetLabelsToNodesResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetLabelsToNodesResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public
// End of GetLabelsToNodesResponseProto; then GetClusterNodeLabelsRequestProto
// begins — an empty request message (no declared fields; its parse loop only
// collects unknown fields). Its definition continues past this region.
org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetClusterNodeLabelsRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetClusterNodeLabelsRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.GetClusterNodeLabelsRequestProto} */ public static final class GetClusterNodeLabelsRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetClusterNodeLabelsRequestProto) GetClusterNodeLabelsRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetClusterNodeLabelsRequestProto.newBuilder() to construct. private GetClusterNodeLabelsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetClusterNodeLabelsRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetClusterNodeLabelsRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch
(org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto)) { return super.equals(obj); } 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto 
parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetClusterNodeLabelsRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetClusterNodeLabelsRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto.class, 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetClusterNodeLabelsRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetClusterNodeLabelsRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetClusterNodeLabelsRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetClusterNodeLabelsRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto 
getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetClusterNodeLabelsResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetClusterNodeLabelsResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated string deprecatedNodeLabels = 1; */ java.util.List getDeprecatedNodeLabelsList(); /** * repeated string deprecatedNodeLabels = 1; */ int getDeprecatedNodeLabelsCount(); /** * repeated string deprecatedNodeLabels = 1; */ java.lang.String getDeprecatedNodeLabels(int index); /** * repeated string deprecatedNodeLabels = 1; */ org.apache.hadoop.thirdparty.protobuf.ByteString getDeprecatedNodeLabelsBytes(int index); /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ java.util.List getNodeLabelsList(); /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getNodeLabels(int index); /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ int getNodeLabelsCount(); /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ java.util.List getNodeLabelsOrBuilderList(); /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder getNodeLabelsOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.GetClusterNodeLabelsResponseProto} */ public static final class GetClusterNodeLabelsResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetClusterNodeLabelsResponseProto) GetClusterNodeLabelsResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetClusterNodeLabelsResponseProto.newBuilder() to construct. 
private GetClusterNodeLabelsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetClusterNodeLabelsResponseProto() { deprecatedNodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; nodeLabels_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetClusterNodeLabelsResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000001) != 0)) { deprecatedNodeLabels_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000001; } deprecatedNodeLabels_.add(bs); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) != 0)) { nodeLabels_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } nodeLabels_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { deprecatedNodeLabels_ = deprecatedNodeLabels_.getUnmodifiableView(); } if (((mutable_bitField0_ & 0x00000002) != 0)) { nodeLabels_ = java.util.Collections.unmodifiableList(nodeLabels_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto.Builder.class); } public static final int DEPRECATEDNODELABELS_FIELD_NUMBER = 1; private org.apache.hadoop.thirdparty.protobuf.LazyStringList deprecatedNodeLabels_; /** * repeated string deprecatedNodeLabels = 1; */ public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getDeprecatedNodeLabelsList() { return deprecatedNodeLabels_; } /** * repeated string deprecatedNodeLabels = 1; */ public int getDeprecatedNodeLabelsCount() { return deprecatedNodeLabels_.size(); } /** * repeated string deprecatedNodeLabels = 1; */ public java.lang.String getDeprecatedNodeLabels(int index) { return deprecatedNodeLabels_.get(index); } /** * repeated string deprecatedNodeLabels = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getDeprecatedNodeLabelsBytes(int index) { return deprecatedNodeLabels_.getByteString(index); } public static final int 
NODELABELS_FIELD_NUMBER = 2; private java.util.List nodeLabels_; /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public java.util.List getNodeLabelsList() { return nodeLabels_; } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public java.util.List getNodeLabelsOrBuilderList() { return nodeLabels_; } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public int getNodeLabelsCount() { return nodeLabels_.size(); } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getNodeLabels(int index) { return nodeLabels_.get(index); } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder getNodeLabelsOrBuilder( int index) { return nodeLabels_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < deprecatedNodeLabels_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, deprecatedNodeLabels_.getRaw(i)); } for (int i = 0; i < nodeLabels_.size(); i++) { output.writeMessage(2, nodeLabels_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < deprecatedNodeLabels_.size(); i++) { dataSize += computeStringSizeNoTag(deprecatedNodeLabels_.getRaw(i)); } size += dataSize; size += 1 * getDeprecatedNodeLabelsList().size(); } for (int i = 0; i < nodeLabels_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream 
.computeMessageSize(2, nodeLabels_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto) obj; if (!getDeprecatedNodeLabelsList() .equals(other.getDeprecatedNodeLabelsList())) return false; if (!getNodeLabelsList() .equals(other.getNodeLabelsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getDeprecatedNodeLabelsCount() > 0) { hash = (37 * hash) + DEPRECATEDNODELABELS_FIELD_NUMBER; hash = (53 * hash) + getDeprecatedNodeLabelsList().hashCode(); } if (getNodeLabelsCount() > 0) { hash = (37 * hash) + NODELABELS_FIELD_NUMBER; hash = (53 * hash) + getNodeLabelsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetClusterNodeLabelsResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetClusterNodeLabelsResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getNodeLabelsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); deprecatedNodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ 
= (bitField0_ & ~0x00000001); if (nodeLabelsBuilder_ == null) { nodeLabels_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { nodeLabelsBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto(this); int from_bitField0_ = bitField0_; if (((bitField0_ & 0x00000001) != 0)) { deprecatedNodeLabels_ = deprecatedNodeLabels_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000001); } result.deprecatedNodeLabels_ = deprecatedNodeLabels_; if (nodeLabelsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { nodeLabels_ = java.util.Collections.unmodifiableList(nodeLabels_); bitField0_ = (bitField0_ & ~0x00000002); } result.nodeLabels_ = nodeLabels_; } else { result.nodeLabels_ = nodeLabelsBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto.getDefaultInstance()) return this; if (!other.deprecatedNodeLabels_.isEmpty()) { if (deprecatedNodeLabels_.isEmpty()) { deprecatedNodeLabels_ = other.deprecatedNodeLabels_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureDeprecatedNodeLabelsIsMutable(); deprecatedNodeLabels_.addAll(other.deprecatedNodeLabels_); } onChanged(); } if (nodeLabelsBuilder_ == null) { if (!other.nodeLabels_.isEmpty()) { if (nodeLabels_.isEmpty()) { nodeLabels_ = other.nodeLabels_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureNodeLabelsIsMutable(); 
nodeLabels_.addAll(other.nodeLabels_); } onChanged(); } } else { if (!other.nodeLabels_.isEmpty()) { if (nodeLabelsBuilder_.isEmpty()) { nodeLabelsBuilder_.dispose(); nodeLabelsBuilder_ = null; nodeLabels_ = other.nodeLabels_; bitField0_ = (bitField0_ & ~0x00000002); nodeLabelsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getNodeLabelsFieldBuilder() : null; } else { nodeLabelsBuilder_.addAllMessages(other.nodeLabels_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.thirdparty.protobuf.LazyStringList deprecatedNodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; private void ensureDeprecatedNodeLabelsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { deprecatedNodeLabels_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(deprecatedNodeLabels_); bitField0_ |= 0x00000001; } } /** * repeated string deprecatedNodeLabels = 1; */ public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getDeprecatedNodeLabelsList() { return deprecatedNodeLabels_.getUnmodifiableView(); } /** * repeated string deprecatedNodeLabels = 1; */ 
public int getDeprecatedNodeLabelsCount() { return deprecatedNodeLabels_.size(); } /** * repeated string deprecatedNodeLabels = 1; */ public java.lang.String getDeprecatedNodeLabels(int index) { return deprecatedNodeLabels_.get(index); } /** * repeated string deprecatedNodeLabels = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getDeprecatedNodeLabelsBytes(int index) { return deprecatedNodeLabels_.getByteString(index); } /** * repeated string deprecatedNodeLabels = 1; */ public Builder setDeprecatedNodeLabels( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureDeprecatedNodeLabelsIsMutable(); deprecatedNodeLabels_.set(index, value); onChanged(); return this; } /** * repeated string deprecatedNodeLabels = 1; */ public Builder addDeprecatedNodeLabels( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureDeprecatedNodeLabelsIsMutable(); deprecatedNodeLabels_.add(value); onChanged(); return this; } /** * repeated string deprecatedNodeLabels = 1; */ public Builder addAllDeprecatedNodeLabels( java.lang.Iterable values) { ensureDeprecatedNodeLabelsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, deprecatedNodeLabels_); onChanged(); return this; } /** * repeated string deprecatedNodeLabels = 1; */ public Builder clearDeprecatedNodeLabels() { deprecatedNodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * repeated string deprecatedNodeLabels = 1; */ public Builder addDeprecatedNodeLabelsBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureDeprecatedNodeLabelsIsMutable(); deprecatedNodeLabels_.add(value); onChanged(); return this; } private java.util.List nodeLabels_ = java.util.Collections.emptyList(); private void ensureNodeLabelsIsMutable() { if 
(!((bitField0_ & 0x00000002) != 0)) { nodeLabels_ = new java.util.ArrayList(nodeLabels_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder> nodeLabelsBuilder_; /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public java.util.List getNodeLabelsList() { if (nodeLabelsBuilder_ == null) { return java.util.Collections.unmodifiableList(nodeLabels_); } else { return nodeLabelsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public int getNodeLabelsCount() { if (nodeLabelsBuilder_ == null) { return nodeLabels_.size(); } else { return nodeLabelsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getNodeLabels(int index) { if (nodeLabelsBuilder_ == null) { return nodeLabels_.get(index); } else { return nodeLabelsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public Builder setNodeLabels( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto value) { if (nodeLabelsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeLabelsIsMutable(); nodeLabels_.set(index, value); onChanged(); } else { nodeLabelsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public Builder setNodeLabels( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder builderForValue) { if (nodeLabelsBuilder_ == null) { ensureNodeLabelsIsMutable(); nodeLabels_.set(index, builderForValue.build()); onChanged(); } else { nodeLabelsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ 
public Builder addNodeLabels(org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto value) { if (nodeLabelsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeLabelsIsMutable(); nodeLabels_.add(value); onChanged(); } else { nodeLabelsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public Builder addNodeLabels( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto value) { if (nodeLabelsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeLabelsIsMutable(); nodeLabels_.add(index, value); onChanged(); } else { nodeLabelsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public Builder addNodeLabels( org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder builderForValue) { if (nodeLabelsBuilder_ == null) { ensureNodeLabelsIsMutable(); nodeLabels_.add(builderForValue.build()); onChanged(); } else { nodeLabelsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public Builder addNodeLabels( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder builderForValue) { if (nodeLabelsBuilder_ == null) { ensureNodeLabelsIsMutable(); nodeLabels_.add(index, builderForValue.build()); onChanged(); } else { nodeLabelsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public Builder addAllNodeLabels( java.lang.Iterable values) { if (nodeLabelsBuilder_ == null) { ensureNodeLabelsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, nodeLabels_); onChanged(); } else { nodeLabelsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public Builder clearNodeLabels() { if (nodeLabelsBuilder_ == null) { nodeLabels_ = 
java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { nodeLabelsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public Builder removeNodeLabels(int index) { if (nodeLabelsBuilder_ == null) { ensureNodeLabelsIsMutable(); nodeLabels_.remove(index); onChanged(); } else { nodeLabelsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder getNodeLabelsBuilder( int index) { return getNodeLabelsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder getNodeLabelsOrBuilder( int index) { if (nodeLabelsBuilder_ == null) { return nodeLabels_.get(index); } else { return nodeLabelsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public java.util.List getNodeLabelsOrBuilderList() { if (nodeLabelsBuilder_ != null) { return nodeLabelsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(nodeLabels_); } } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder addNodeLabelsBuilder() { return getNodeLabelsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder addNodeLabelsBuilder( int index) { return getNodeLabelsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeLabelProto nodeLabels = 2; */ public java.util.List getNodeLabelsBuilderList() { return getNodeLabelsFieldBuilder().getBuilderList(); } private 
org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder> getNodeLabelsFieldBuilder() { if (nodeLabelsBuilder_ == null) { nodeLabelsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder>( nodeLabels_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); nodeLabels_ = null; } return nodeLabelsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetClusterNodeLabelsResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetClusterNodeLabelsResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetClusterNodeLabelsResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, 
// NOTE(review): machine-generated protobuf code (protoc, "DO NOT EDIT" header) for
// yarn_service_protos.proto. Do not hand-edit; regenerate from the .proto instead.
// This rendering appears lossy: generic type arguments (e.g. Parser<...>) and the
// original line breaks seem to have been stripped by the page extraction — TODO
// confirm against the actual artifact in the Maven repository before relying on it.
// Below: tail of GetClusterNodeLabelsResponseProto (anonymous PARSER, parser() /
// getParserForType() / getDefaultInstanceForType()), then the marker interface
// GetClusterNodeAttributesRequestProtoOrBuilder (no methods — the message declares
// no fields) and the start of the GetClusterNodeAttributesRequestProto class.
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetClusterNodeLabelsResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetClusterNodeAttributesRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetClusterNodeAttributesRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.GetClusterNodeAttributesRequestProto} */ public static final class GetClusterNodeAttributesRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetClusterNodeAttributesRequestProto) GetClusterNodeAttributesRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetClusterNodeAttributesRequestProto.newBuilder() to construct.
// Constructors (builder-based, no-arg, and the CodedInputStream parsing ctor).
// The parsing ctor only collects unknown fields — this message has no declared
// fields of its own — followed by descriptor / field-accessor-table plumbing.
private GetClusterNodeAttributesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetClusterNodeAttributesRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetClusterNodeAttributesRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized(
// Standard generated boilerplate: isInitialized / writeTo / getSerializedSize act
// on unknown fields only; equals/hashCode compare descriptor + unknown fields;
// then the family of static parseFrom overloads, all delegating to PARSER.
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom( java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ?
// newBuilder()/toBuilder() helpers, then the nested Builder class: clear / build /
// buildPartial / clone plus the reflective setField / clearField / clearOneof /
// setRepeatedField / addRepeatedField overrides, all delegating to the
// GeneratedMessageV3.Builder superclass since this message declares no fields.
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetClusterNodeAttributesRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetClusterNodeAttributesRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } //
@@protoc_insertion_point(builder_scope:hadoop.yarn.GetClusterNodeAttributesRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetClusterNodeAttributesRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetClusterNodeAttributesRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetClusterNodeAttributesRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetClusterNodeAttributesResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetClusterNodeAttributesResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ java.util.List getNodeAttributesList(); /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto getNodeAttributes(int index); /** * repeated
.hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ int getNodeAttributesCount(); /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ java.util.List getNodeAttributesOrBuilderList(); /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder getNodeAttributesOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.GetClusterNodeAttributesResponseProto} */ public static final class GetClusterNodeAttributesResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetClusterNodeAttributesResponseProto) GetClusterNodeAttributesResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetClusterNodeAttributesResponseProto.newBuilder() to construct. private GetClusterNodeAttributesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetClusterNodeAttributesResponseProto() { nodeAttributes_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetClusterNodeAttributesResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { nodeAttributes_ = new 
// Wire-format parse loop for GetClusterNodeAttributesResponseProto: tag 10
// (field 1, wire type 2) appends a NodeAttributeInfoProto; tag 0 ends the
// stream; anything else is preserved via parseUnknownField. The finally
// block freezes the accumulated list as unmodifiable. NOTE(review): the
// generic parameter of the ArrayList above was stripped in extraction.
java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } nodeAttributes_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { nodeAttributes_ = java.util.Collections.unmodifiableList(nodeAttributes_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto.Builder.class); } public static final int NODEATTRIBUTES_FIELD_NUMBER = 1; private java.util.List nodeAttributes_; /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public java.util.List getNodeAttributesList() { return nodeAttributes_; } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public java.util.List getNodeAttributesOrBuilderList() { return nodeAttributes_; } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 
1; */ public int getNodeAttributesCount() { return nodeAttributes_.size(); } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto getNodeAttributes(int index) { return nodeAttributes_.get(index); } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder getNodeAttributesOrBuilder( int index) { return nodeAttributes_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getNodeAttributesCount(); i++) { if (!getNodeAttributes(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < nodeAttributes_.size(); i++) { output.writeMessage(1, nodeAttributes_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < nodeAttributes_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, nodeAttributes_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto) obj; if 
// equals() compares the repeated nodeAttributes list plus unknown fields;
// hashCode() is memoized and mixes NODEATTRIBUTES_FIELD_NUMBER with the
// list hash only when the list is non-empty.
(!getNodeAttributesList() .equals(other.getNodeAttributesList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getNodeAttributesCount() > 0) { hash = (37 * hash) + NODEATTRIBUTES_FIELD_NUMBER; hash = (53 * hash) + getNodeAttributesList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { 
// Static parseFrom / parseDelimitedFrom overloads, all delegating to PARSER
// or to the GeneratedMessageV3 parseWithIOException helpers.
return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
// newBuilder()/toBuilder() construct builders from DEFAULT_INSTANCE; the
// nested Builder class begins here with descriptor plumbing and clear().
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetClusterNodeAttributesResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetClusterNodeAttributesResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_fieldAccessorTable 
.ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getNodeAttributesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (nodeAttributesBuilder_ == null) { nodeAttributes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { nodeAttributesBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto buildPartial() { 
// buildPartial(): transfers the repeated field into the result — freezing
// the plain-list representation as unmodifiable — or, when a field builder
// exists, building from it. The delegation overrides that follow are the
// standard GeneratedMessageV3.Builder boilerplate.
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto(this); int from_bitField0_ = bitField0_; if (nodeAttributesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { nodeAttributes_ = java.util.Collections.unmodifiableList(nodeAttributes_); bitField0_ = (bitField0_ & ~0x00000001); } result.nodeAttributes_ = nodeAttributes_; } else { result.nodeAttributes_ = nodeAttributesBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder 
// Typed mergeFrom(): concatenates the other message's nodeAttributes into
// this builder, handling both the plain-list mode and the
// RepeatedFieldBuilderV3-backed mode (including the dispose/re-acquire dance
// when the incoming list can be adopted wholesale).
mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto.getDefaultInstance()) return this; if (nodeAttributesBuilder_ == null) { if (!other.nodeAttributes_.isEmpty()) { if (nodeAttributes_.isEmpty()) { nodeAttributes_ = other.nodeAttributes_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureNodeAttributesIsMutable(); nodeAttributes_.addAll(other.nodeAttributes_); } onChanged(); } } else { if (!other.nodeAttributes_.isEmpty()) { if (nodeAttributesBuilder_.isEmpty()) { nodeAttributesBuilder_.dispose(); nodeAttributesBuilder_ = null; nodeAttributes_ = other.nodeAttributes_; bitField0_ = (bitField0_ & ~0x00000001); nodeAttributesBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getNodeAttributesFieldBuilder() : null; } else { nodeAttributesBuilder_.addAllMessages(other.nodeAttributes_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getNodeAttributesCount(); i++) { if (!getNodeAttributes(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int 
// Builder state: bitField0_ bit 0 marks nodeAttributes_ as mutable; all
// accessors below route through nodeAttributesBuilder_ once it exists,
// otherwise operate on the plain list.
bitField0_; private java.util.List nodeAttributes_ = java.util.Collections.emptyList(); private void ensureNodeAttributesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { nodeAttributes_ = new java.util.ArrayList(nodeAttributes_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder> nodeAttributesBuilder_; /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public java.util.List getNodeAttributesList() { if (nodeAttributesBuilder_ == null) { return java.util.Collections.unmodifiableList(nodeAttributes_); } else { return nodeAttributesBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public int getNodeAttributesCount() { if (nodeAttributesBuilder_ == null) { return nodeAttributes_.size(); } else { return nodeAttributesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto getNodeAttributes(int index) { if (nodeAttributesBuilder_ == null) { return nodeAttributes_.get(index); } else { return nodeAttributesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public Builder setNodeAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto value) { if (nodeAttributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeAttributesIsMutable(); nodeAttributes_.set(index, value); onChanged(); } else { nodeAttributesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public Builder setNodeAttributes( int index, 
// Setter/adder variants (by value, by builder, indexed or appended) for the
// repeated nodeAttributes field.
org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder builderForValue) { if (nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); nodeAttributes_.set(index, builderForValue.build()); onChanged(); } else { nodeAttributesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public Builder addNodeAttributes(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto value) { if (nodeAttributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeAttributesIsMutable(); nodeAttributes_.add(value); onChanged(); } else { nodeAttributesBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public Builder addNodeAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto value) { if (nodeAttributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeAttributesIsMutable(); nodeAttributes_.add(index, value); onChanged(); } else { nodeAttributesBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public Builder addNodeAttributes( org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder builderForValue) { if (nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); nodeAttributes_.add(builderForValue.build()); onChanged(); } else { nodeAttributesBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public Builder addNodeAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder builderForValue) { if (nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); nodeAttributes_.add(index, builderForValue.build()); onChanged(); } else { nodeAttributesBuilder_.addMessage(index, 
// Bulk addAll / clear / remove operations and builder-view accessors
// (getNodeAttributesBuilder, OrBuilder list) for the repeated field.
builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public Builder addAllNodeAttributes( java.lang.Iterable values) { if (nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, nodeAttributes_); onChanged(); } else { nodeAttributesBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public Builder clearNodeAttributes() { if (nodeAttributesBuilder_ == null) { nodeAttributes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { nodeAttributesBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public Builder removeNodeAttributes(int index) { if (nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); nodeAttributes_.remove(index); onChanged(); } else { nodeAttributesBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder getNodeAttributesBuilder( int index) { return getNodeAttributesFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder getNodeAttributesOrBuilder( int index) { if (nodeAttributesBuilder_ == null) { return nodeAttributes_.get(index); } else { return nodeAttributesBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public java.util.List getNodeAttributesOrBuilderList() { if (nodeAttributesBuilder_ != null) { return nodeAttributesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(nodeAttributes_); } } /** * repeated 
.hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder addNodeAttributesBuilder() { return getNodeAttributesFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder addNodeAttributesBuilder( int index) { return getNodeAttributesFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeAttributeInfoProto nodeAttributes = 1; */ public java.util.List getNodeAttributesBuilderList() { return getNodeAttributesFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder> getNodeAttributesFieldBuilder() { if (nodeAttributesBuilder_ == null) { nodeAttributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder>( nodeAttributes_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); nodeAttributes_ = null; } return nodeAttributesBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } 
// Message statics: lazily-built singleton DEFAULT_INSTANCE, deprecated
// anonymous AbstractParser PARSER, and parser()/getParserForType()
// accessors; then the OrBuilder interface for the next message
// (GetAttributesToNodesRequestProto) begins.
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetClusterNodeAttributesResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetClusterNodeAttributesResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetClusterNodeAttributesResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetClusterNodeAttributesResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetAttributesToNodesRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetAttributesToNodesRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1; */ java.util.List getNodeAttributesList(); /** * repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getNodeAttributes(int index); /** * repeated 
.hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1; */ int getNodeAttributesCount(); /** * repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1; */ java.util.List getNodeAttributesOrBuilderList(); /** * repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getNodeAttributesOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.GetAttributesToNodesRequestProto} */ public static final class GetAttributesToNodesRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetAttributesToNodesRequestProto) GetAttributesToNodesRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetAttributesToNodesRequestProto.newBuilder() to construct. private GetAttributesToNodesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetAttributesToNodesRequestProto() { nodeAttributes_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetAttributesToNodesRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { nodeAttributes_ = new java.util.ArrayList(); mutable_bitField0_ |= 
0x00000001; } nodeAttributes_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { nodeAttributes_ = java.util.Collections.unmodifiableList(nodeAttributes_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto.Builder.class); } public static final int NODEATTRIBUTES_FIELD_NUMBER = 1; private java.util.List nodeAttributes_; /** * repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1; */ public java.util.List getNodeAttributesList() { return nodeAttributes_; } /** * repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1; */ public java.util.List getNodeAttributesOrBuilderList() { return nodeAttributes_; } /** * repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1; */ public int getNodeAttributesCount() { return 
nodeAttributes_.size(); } /** * repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getNodeAttributes(int index) { return nodeAttributes_.get(index); } /** * repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getNodeAttributesOrBuilder( int index) { return nodeAttributes_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getNodeAttributesCount(); i++) { if (!getNodeAttributes(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < nodeAttributes_.size(); i++) { output.writeMessage(1, nodeAttributes_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < nodeAttributes_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, nodeAttributes_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto) obj; if (!getNodeAttributesList() .equals(other.getNodeAttributesList())) return false; if 
(!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getNodeAttributesCount() > 0) { hash = (37 * hash) + NODEATTRIBUTES_FIELD_NUMBER; hash = (53 * hash) + getNodeAttributesList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto 
parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetAttributesToNodesRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetAttributesToNodesRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto.class, 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getNodeAttributesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (nodeAttributesBuilder_ == null) { nodeAttributes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { nodeAttributesBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto(this); int from_bitField0_ = bitField0_; if 
(nodeAttributesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { nodeAttributes_ = java.util.Collections.unmodifiableList(nodeAttributes_); bitField0_ = (bitField0_ & ~0x00000001); } result.nodeAttributes_ = nodeAttributes_; } else { result.nodeAttributes_ = nodeAttributesBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto.getDefaultInstance()) return this; if (nodeAttributesBuilder_ == null) { if (!other.nodeAttributes_.isEmpty()) { if 
(nodeAttributes_.isEmpty()) { nodeAttributes_ = other.nodeAttributes_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureNodeAttributesIsMutable(); nodeAttributes_.addAll(other.nodeAttributes_); } onChanged(); } } else { if (!other.nodeAttributes_.isEmpty()) { if (nodeAttributesBuilder_.isEmpty()) { nodeAttributesBuilder_.dispose(); nodeAttributesBuilder_ = null; nodeAttributes_ = other.nodeAttributes_; bitField0_ = (bitField0_ & ~0x00000001); nodeAttributesBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getNodeAttributesFieldBuilder() : null; } else { nodeAttributesBuilder_.addAllMessages(other.nodeAttributes_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getNodeAttributesCount(); i++) { if (!getNodeAttributes(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List nodeAttributes_ = java.util.Collections.emptyList(); private void ensureNodeAttributesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { nodeAttributes_ = new java.util.ArrayList(nodeAttributes_); bitField0_ |= 0x00000001; } } private 
org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> nodeAttributesBuilder_; /** * repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1; */ public java.util.List getNodeAttributesList() { if (nodeAttributesBuilder_ == null) { return java.util.Collections.unmodifiableList(nodeAttributes_); } else { return nodeAttributesBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1; */ public int getNodeAttributesCount() { if (nodeAttributesBuilder_ == null) { return nodeAttributes_.size(); } else { return nodeAttributesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getNodeAttributes(int index) { if (nodeAttributesBuilder_ == null) { return nodeAttributes_.get(index); } else { return nodeAttributesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1; */ public Builder setNodeAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) { if (nodeAttributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeAttributesIsMutable(); nodeAttributes_.set(index, value); onChanged(); } else { nodeAttributesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1; */ public Builder setNodeAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder builderForValue) { if (nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); nodeAttributes_.set(index, builderForValue.build()); onChanged(); } else { nodeAttributesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** 
* <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public Builder addNodeAttributes(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) {
        if (nodeAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(value);
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public Builder addNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) {
        if (nodeAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(index, value);
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public Builder addNodeAttributes(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder builderForValue) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(builderForValue.build());
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public Builder addNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder builderForValue) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(index, builderForValue.build());
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public Builder addAllNodeAttributes(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto> values) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, nodeAttributes_);
          onChanged();
        } else {
          nodeAttributesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public Builder clearNodeAttributes() {
        if (nodeAttributesBuilder_ == null) {
          nodeAttributes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          nodeAttributesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public Builder removeNodeAttributes(int index) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.remove(index);
          onChanged();
        } else {
          nodeAttributesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder getNodeAttributesBuilder(
          int index) {
        return getNodeAttributesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getNodeAttributesOrBuilder(
          int index) {
        if (nodeAttributesBuilder_ == null) {
          return nodeAttributes_.get(index);
        } else {
          return nodeAttributesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> 
           getNodeAttributesOrBuilderList() {
        if (nodeAttributesBuilder_ != null) {
          return nodeAttributesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(nodeAttributes_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder addNodeAttributesBuilder() {
        return getNodeAttributesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder addNodeAttributesBuilder(
          int index) {
        return getNodeAttributesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeKeyProto nodeAttributes = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder> 
           getNodeAttributesBuilderList() {
        return getNodeAttributesFieldBuilder().getBuilderList();
      }
      // Lazily creates the repeated-field builder; ownership of the backing list
      // transfers to the builder (nodeAttributes_ is nulled afterwards).
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> 
          getNodeAttributesFieldBuilder() {
        if (nodeAttributesBuilder_ == null) {
          nodeAttributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder>(
                  nodeAttributes_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          nodeAttributes_ = null;
        }
        return nodeAttributesBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetAttributesToNodesRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetAttributesToNodesRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetAttributesToNodesRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetAttributesToNodesRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetAttributesToNodesResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetAttributesToNodesResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ java.util.List getAttributesToNodesList(); /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto getAttributesToNodes(int index); /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ int getAttributesToNodesCount(); /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ java.util.List getAttributesToNodesOrBuilderList(); /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ 
org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProtoOrBuilder getAttributesToNodesOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.GetAttributesToNodesResponseProto} */ public static final class GetAttributesToNodesResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetAttributesToNodesResponseProto) GetAttributesToNodesResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetAttributesToNodesResponseProto.newBuilder() to construct. private GetAttributesToNodesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetAttributesToNodesResponseProto() { attributesToNodes_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetAttributesToNodesResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { attributesToNodes_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } attributesToNodes_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } 
catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { attributesToNodes_ = java.util.Collections.unmodifiableList(attributesToNodes_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto.Builder.class); } public static final int ATTRIBUTESTONODES_FIELD_NUMBER = 1; private java.util.List attributesToNodes_; /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public java.util.List getAttributesToNodesList() { return attributesToNodes_; } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public java.util.List getAttributesToNodesOrBuilderList() { return attributesToNodes_; } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public int getAttributesToNodesCount() { return attributesToNodes_.size(); } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto getAttributesToNodes(int index) { return attributesToNodes_.get(index); 
} /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProtoOrBuilder getAttributesToNodesOrBuilder( int index) { return attributesToNodes_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getAttributesToNodesCount(); i++) { if (!getAttributesToNodes(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < attributesToNodes_.size(); i++) { output.writeMessage(1, attributesToNodes_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < attributesToNodes_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, attributesToNodes_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto) obj; if (!getAttributesToNodesList() .equals(other.getAttributesToNodesList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 
* hash) + getDescriptor().hashCode(); if (getAttributesToNodesCount() > 0) { hash = (37 * hash) + ATTRIBUTESTONODES_FIELD_NUMBER; hash = (53 * hash) + getAttributesToNodesList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetAttributesToNodesResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetAttributesToNodesResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto.Builder.class); } // Construct using 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getAttributesToNodesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (attributesToNodesBuilder_ == null) { attributesToNodes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { attributesToNodesBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto(this); int from_bitField0_ = bitField0_; if (attributesToNodesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { attributesToNodes_ = 
java.util.Collections.unmodifiableList(attributesToNodes_); bitField0_ = (bitField0_ & ~0x00000001); } result.attributesToNodes_ = attributesToNodes_; } else { result.attributesToNodes_ = attributesToNodesBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto.getDefaultInstance()) return this; if (attributesToNodesBuilder_ == null) { if (!other.attributesToNodes_.isEmpty()) { if (attributesToNodes_.isEmpty()) { attributesToNodes_ = 
other.attributesToNodes_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureAttributesToNodesIsMutable(); attributesToNodes_.addAll(other.attributesToNodes_); } onChanged(); } } else { if (!other.attributesToNodes_.isEmpty()) { if (attributesToNodesBuilder_.isEmpty()) { attributesToNodesBuilder_.dispose(); attributesToNodesBuilder_ = null; attributesToNodes_ = other.attributesToNodes_; bitField0_ = (bitField0_ & ~0x00000001); attributesToNodesBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getAttributesToNodesFieldBuilder() : null; } else { attributesToNodesBuilder_.addAllMessages(other.attributesToNodes_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getAttributesToNodesCount(); i++) { if (!getAttributesToNodes(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List attributesToNodes_ = java.util.Collections.emptyList(); private void ensureAttributesToNodesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { attributesToNodes_ = new java.util.ArrayList(attributesToNodes_); bitField0_ |= 0x00000001; } } private 
org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProtoOrBuilder> attributesToNodesBuilder_; /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public java.util.List getAttributesToNodesList() { if (attributesToNodesBuilder_ == null) { return java.util.Collections.unmodifiableList(attributesToNodes_); } else { return attributesToNodesBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public int getAttributesToNodesCount() { if (attributesToNodesBuilder_ == null) { return attributesToNodes_.size(); } else { return attributesToNodesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto getAttributesToNodes(int index) { if (attributesToNodesBuilder_ == null) { return attributesToNodes_.get(index); } else { return attributesToNodesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public Builder setAttributesToNodes( int index, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto value) { if (attributesToNodesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributesToNodesIsMutable(); attributesToNodes_.set(index, value); onChanged(); } else { attributesToNodesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public Builder setAttributesToNodes( int index, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder builderForValue) { if (attributesToNodesBuilder_ == null) { ensureAttributesToNodesIsMutable(); attributesToNodes_.set(index, builderForValue.build()); onChanged(); } else { 
attributesToNodesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public Builder addAttributesToNodes(org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto value) { if (attributesToNodesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributesToNodesIsMutable(); attributesToNodes_.add(value); onChanged(); } else { attributesToNodesBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public Builder addAttributesToNodes( int index, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto value) { if (attributesToNodesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributesToNodesIsMutable(); attributesToNodes_.add(index, value); onChanged(); } else { attributesToNodesBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public Builder addAttributesToNodes( org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder builderForValue) { if (attributesToNodesBuilder_ == null) { ensureAttributesToNodesIsMutable(); attributesToNodes_.add(builderForValue.build()); onChanged(); } else { attributesToNodesBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public Builder addAttributesToNodes( int index, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder builderForValue) { if (attributesToNodesBuilder_ == null) { ensureAttributesToNodesIsMutable(); attributesToNodes_.add(index, builderForValue.build()); onChanged(); } else { attributesToNodesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public Builder addAllAttributesToNodes( java.lang.Iterable 
values) { if (attributesToNodesBuilder_ == null) { ensureAttributesToNodesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, attributesToNodes_); onChanged(); } else { attributesToNodesBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public Builder clearAttributesToNodes() { if (attributesToNodesBuilder_ == null) { attributesToNodes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { attributesToNodesBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public Builder removeAttributesToNodes(int index) { if (attributesToNodesBuilder_ == null) { ensureAttributesToNodesIsMutable(); attributesToNodes_.remove(index); onChanged(); } else { attributesToNodesBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder getAttributesToNodesBuilder( int index) { return getAttributesToNodesFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProtoOrBuilder getAttributesToNodesOrBuilder( int index) { if (attributesToNodesBuilder_ == null) { return attributesToNodes_.get(index); } else { return attributesToNodesBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public java.util.List getAttributesToNodesOrBuilderList() { if (attributesToNodesBuilder_ != null) { return attributesToNodesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(attributesToNodes_); } } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder addAttributesToNodesBuilder() { return getAttributesToNodesFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder addAttributesToNodesBuilder( int index) { return getAttributesToNodesFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.AttributeToNodesProto attributesToNodes = 1; */ public java.util.List getAttributesToNodesBuilderList() { return getAttributesToNodesFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProtoOrBuilder> getAttributesToNodesFieldBuilder() { if (attributesToNodesBuilder_ == null) { attributesToNodesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProtoOrBuilder>( attributesToNodes_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); attributesToNodes_ = null; } return attributesToNodesBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:hadoop.yarn.GetAttributesToNodesResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetAttributesToNodesResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetAttributesToNodesResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetAttributesToNodesResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetNodesToAttributesRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetNodesToAttributesRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated string hostnames = 1; */ java.util.List getHostnamesList(); /** * repeated string hostnames = 1; */ int getHostnamesCount(); /** * repeated string hostnames = 1; */ java.lang.String getHostnames(int index); /** * repeated string hostnames = 1; */ org.apache.hadoop.thirdparty.protobuf.ByteString 
getHostnamesBytes(int index); } /** * Protobuf type {@code hadoop.yarn.GetNodesToAttributesRequestProto} */ public static final class GetNodesToAttributesRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetNodesToAttributesRequestProto) GetNodesToAttributesRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetNodesToAttributesRequestProto.newBuilder() to construct. private GetNodesToAttributesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetNodesToAttributesRequestProto() { hostnames_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetNodesToAttributesRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000001) != 0)) { hostnames_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000001; } hostnames_.add(bs); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { 
throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { hostnames_ = hostnames_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto.Builder.class); } public static final int HOSTNAMES_FIELD_NUMBER = 1; private org.apache.hadoop.thirdparty.protobuf.LazyStringList hostnames_; /** * repeated string hostnames = 1; */ public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getHostnamesList() { return hostnames_; } /** * repeated string hostnames = 1; */ public int getHostnamesCount() { return hostnames_.size(); } /** * repeated string hostnames = 1; */ public java.lang.String getHostnames(int index) { return hostnames_.get(index); } /** * repeated string hostnames = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getHostnamesBytes(int index) { return hostnames_.getByteString(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; 
return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < hostnames_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, hostnames_.getRaw(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < hostnames_.size(); i++) { dataSize += computeStringSizeNoTag(hostnames_.getRaw(i)); } size += dataSize; size += 1 * getHostnamesList().size(); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto) obj; if (!getHostnamesList() .equals(other.getHostnamesList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getHostnamesCount() > 0) { hash = (37 * hash) + HOSTNAMES_FIELD_NUMBER; hash = (53 * hash) + getHostnamesList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto 
parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetNodesToAttributesRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetNodesToAttributesRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); hostnames_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } 
@java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto(this); int from_bitField0_ = bitField0_; if (((bitField0_ & 0x00000001) != 0)) { hostnames_ = hostnames_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000001); } result.hostnames_ = hostnames_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto.getDefaultInstance()) return this; if (!other.hostnames_.isEmpty()) { if (hostnames_.isEmpty()) { hostnames_ = other.hostnames_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureHostnamesIsMutable(); hostnames_.addAll(other.hostnames_); } onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != 
null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.thirdparty.protobuf.LazyStringList hostnames_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; private void ensureHostnamesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { hostnames_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(hostnames_); bitField0_ |= 0x00000001; } } /** * repeated string hostnames = 1; */ public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getHostnamesList() { return hostnames_.getUnmodifiableView(); } /** * repeated string hostnames = 1; */ public int getHostnamesCount() { return hostnames_.size(); } /** * repeated string hostnames = 1; */ public java.lang.String getHostnames(int index) { return hostnames_.get(index); } /** * repeated string hostnames = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getHostnamesBytes(int index) { return hostnames_.getByteString(index); } /** * repeated string hostnames = 1; */ public Builder setHostnames( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureHostnamesIsMutable(); hostnames_.set(index, value); onChanged(); return this; } /** * repeated string hostnames = 1; */ public Builder addHostnames( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureHostnamesIsMutable(); hostnames_.add(value); onChanged(); return this; } /** * repeated string hostnames = 1; */ public Builder addAllHostnames( java.lang.Iterable values) { ensureHostnamesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, hostnames_); onChanged(); return this; } /** * repeated string hostnames = 1; */ public Builder clearHostnames() { hostnames_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * repeated string hostnames = 1; */ public Builder 
addHostnamesBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureHostnamesIsMutable(); hostnames_.add(value); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetNodesToAttributesRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetNodesToAttributesRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetNodesToAttributesRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetNodesToAttributesRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesRequestProto 
getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetNodesToAttributesResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetNodesToAttributesResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ java.util.List getNodesToAttributesList(); /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto getNodesToAttributes(int index); /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ int getNodesToAttributesCount(); /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ java.util.List getNodesToAttributesOrBuilderList(); /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder getNodesToAttributesOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.GetNodesToAttributesResponseProto} */ public static final class GetNodesToAttributesResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetNodesToAttributesResponseProto) GetNodesToAttributesResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetNodesToAttributesResponseProto.newBuilder() to construct. 
private GetNodesToAttributesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetNodesToAttributesResponseProto() { nodesToAttributes_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetNodesToAttributesResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { nodesToAttributes_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } nodesToAttributes_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { nodesToAttributes_ = java.util.Collections.unmodifiableList(nodesToAttributes_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto.Builder.class); } public static final int NODESTOATTRIBUTES_FIELD_NUMBER = 1; private java.util.List nodesToAttributes_; /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public java.util.List getNodesToAttributesList() { return nodesToAttributes_; } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public java.util.List getNodesToAttributesOrBuilderList() { return nodesToAttributes_; } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public int getNodesToAttributesCount() { return nodesToAttributes_.size(); } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto getNodesToAttributes(int index) { return nodesToAttributes_.get(index); } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder getNodesToAttributesOrBuilder( int index) { return nodesToAttributes_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getNodesToAttributesCount(); i++) { if (!getNodesToAttributes(i).isInitialized()) { memoizedIsInitialized = 
0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < nodesToAttributes_.size(); i++) { output.writeMessage(1, nodesToAttributes_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < nodesToAttributes_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, nodesToAttributes_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto) obj; if (!getNodesToAttributesList() .equals(other.getNodesToAttributesList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getNodesToAttributesCount() > 0) { hash = (37 * hash) + NODESTOATTRIBUTES_FIELD_NUMBER; hash = (53 * hash) + getNodesToAttributesList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetNodesToAttributesResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetNodesToAttributesResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getNodesToAttributesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (nodesToAttributesBuilder_ == null) { nodesToAttributes_ = 
java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { nodesToAttributesBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto(this); int from_bitField0_ = bitField0_; if (nodesToAttributesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { nodesToAttributes_ = java.util.Collections.unmodifiableList(nodesToAttributes_); bitField0_ = (bitField0_ & ~0x00000001); } result.nodesToAttributes_ = nodesToAttributes_; } else { result.nodesToAttributes_ = nodesToAttributesBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto.getDefaultInstance()) return this; if (nodesToAttributesBuilder_ == null) { if (!other.nodesToAttributes_.isEmpty()) { if (nodesToAttributes_.isEmpty()) { nodesToAttributes_ = other.nodesToAttributes_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureNodesToAttributesIsMutable(); nodesToAttributes_.addAll(other.nodesToAttributes_); } onChanged(); } } else { if (!other.nodesToAttributes_.isEmpty()) { if (nodesToAttributesBuilder_.isEmpty()) { nodesToAttributesBuilder_.dispose(); nodesToAttributesBuilder_ = null; nodesToAttributes_ = other.nodesToAttributes_; bitField0_ = (bitField0_ & ~0x00000001); nodesToAttributesBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getNodesToAttributesFieldBuilder() : null; } else { nodesToAttributesBuilder_.addAllMessages(other.nodesToAttributes_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getNodesToAttributesCount(); i++) { if (!getNodesToAttributes(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List nodesToAttributes_ = java.util.Collections.emptyList(); private void ensureNodesToAttributesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { nodesToAttributes_ = new java.util.ArrayList(nodesToAttributes_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder> nodesToAttributesBuilder_; /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public java.util.List getNodesToAttributesList() { if (nodesToAttributesBuilder_ == null) { return java.util.Collections.unmodifiableList(nodesToAttributes_); } else { return nodesToAttributesBuilder_.getMessageList(); 
} } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public int getNodesToAttributesCount() { if (nodesToAttributesBuilder_ == null) { return nodesToAttributes_.size(); } else { return nodesToAttributesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto getNodesToAttributes(int index) { if (nodesToAttributesBuilder_ == null) { return nodesToAttributes_.get(index); } else { return nodesToAttributesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public Builder setNodesToAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto value) { if (nodesToAttributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodesToAttributesIsMutable(); nodesToAttributes_.set(index, value); onChanged(); } else { nodesToAttributesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public Builder setNodesToAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder builderForValue) { if (nodesToAttributesBuilder_ == null) { ensureNodesToAttributesIsMutable(); nodesToAttributes_.set(index, builderForValue.build()); onChanged(); } else { nodesToAttributesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public Builder addNodesToAttributes(org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto value) { if (nodesToAttributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodesToAttributesIsMutable(); nodesToAttributes_.add(value); onChanged(); } else { nodesToAttributesBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ 
public Builder addNodesToAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto value) { if (nodesToAttributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodesToAttributesIsMutable(); nodesToAttributes_.add(index, value); onChanged(); } else { nodesToAttributesBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public Builder addNodesToAttributes( org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder builderForValue) { if (nodesToAttributesBuilder_ == null) { ensureNodesToAttributesIsMutable(); nodesToAttributes_.add(builderForValue.build()); onChanged(); } else { nodesToAttributesBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public Builder addNodesToAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder builderForValue) { if (nodesToAttributesBuilder_ == null) { ensureNodesToAttributesIsMutable(); nodesToAttributes_.add(index, builderForValue.build()); onChanged(); } else { nodesToAttributesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public Builder addAllNodesToAttributes( java.lang.Iterable values) { if (nodesToAttributesBuilder_ == null) { ensureNodesToAttributesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, nodesToAttributes_); onChanged(); } else { nodesToAttributesBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public Builder clearNodesToAttributes() { if (nodesToAttributesBuilder_ == null) { nodesToAttributes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { nodesToAttributesBuilder_.clear(); } 
return this; } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public Builder removeNodesToAttributes(int index) { if (nodesToAttributesBuilder_ == null) { ensureNodesToAttributesIsMutable(); nodesToAttributes_.remove(index); onChanged(); } else { nodesToAttributesBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder getNodesToAttributesBuilder( int index) { return getNodesToAttributesFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder getNodesToAttributesOrBuilder( int index) { if (nodesToAttributesBuilder_ == null) { return nodesToAttributes_.get(index); } else { return nodesToAttributesBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public java.util.List getNodesToAttributesOrBuilderList() { if (nodesToAttributesBuilder_ != null) { return nodesToAttributesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(nodesToAttributes_); } } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder addNodesToAttributesBuilder() { return getNodesToAttributesFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder addNodesToAttributesBuilder( int index) { return getNodesToAttributesFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeToAttributesProto nodesToAttributes = 
1; */ public java.util.List getNodesToAttributesBuilderList() { return getNodesToAttributesFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder> getNodesToAttributesFieldBuilder() { if (nodesToAttributesBuilder_ == null) { nodesToAttributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder>( nodesToAttributes_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); nodesToAttributes_ = null; } return nodesToAttributesBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetNodesToAttributesResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetNodesToAttributesResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new 
// NOTE(review): protoc-generated code ("DO NOT EDIT!" per the file header) — do not hand-modify
// logic here; change yarn_service_protos.proto and regenerate instead. This span is reformatted
// for readability only; all non-comment tokens are unchanged. Generic type arguments (e.g. on
// AbstractParser / Parser below) appear to have been stripped by HTML extraction of this copy —
// TODO confirm against the regenerated source before compiling.
//
// Continuation of the previous line's assignment:
//   PARSER = new <anonymous AbstractParser for GetNodesToAttributesResponseProto>
org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
  @java.lang.Override
  public GetNodesToAttributesResponseProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    // Delegates to the message's parsing constructor, which reads fields from the stream.
    return new GetNodesToAttributesResponseProto(input, extensionRegistry);
  }
};

// Preferred public accessor for the parser (the PARSER field itself is @Deprecated).
public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}
// end of class GetNodesToAttributesResponseProto

// Read-only accessor contract for hadoop.yarn.UpdateApplicationPriorityRequestProto,
// implemented by both the immutable message and its Builder. Carries two required
// sub-messages: applicationId (field 1) and applicationPriority (field 2).
public interface UpdateApplicationPriorityRequestProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateApplicationPriorityRequestProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /**
   * required .hadoop.yarn.ApplicationIdProto applicationId = 1;
   */
  boolean hasApplicationId();
  /**
   * required .hadoop.yarn.ApplicationIdProto applicationId = 1;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
  /**
   * required .hadoop.yarn.ApplicationIdProto applicationId = 1;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();

  /**
   * required .hadoop.yarn.PriorityProto applicationPriority = 2;
   */
  boolean hasApplicationPriority();
  /**
   * required .hadoop.yarn.PriorityProto applicationPriority = 2;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getApplicationPriority();
  /**
   * required .hadoop.yarn.PriorityProto applicationPriority = 2;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder();
}

/**
 * Protobuf type {@code hadoop.yarn.UpdateApplicationPriorityRequestProto}
 */
// (class name "UpdateApplicationPriorityRequestProto" continues on the next line)
public static final class
UpdateApplicationPriorityRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateApplicationPriorityRequestProto) UpdateApplicationPriorityRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateApplicationPriorityRequestProto.newBuilder() to construct. private UpdateApplicationPriorityRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private UpdateApplicationPriorityRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private UpdateApplicationPriorityRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = applicationId_.toBuilder(); } applicationId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(applicationId_); applicationId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder subBuilder = null; if (((bitField0_ & 0x00000002) != 0)) { subBuilder = applicationPriority_.toBuilder(); } 
applicationPriority_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(applicationPriority_); applicationPriority_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto.Builder.class); } private int bitField0_; public static final int APPLICATIONID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { return applicationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } public static final int APPLICATIONPRIORITY_FIELD_NUMBER = 2; private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto applicationPriority_; /** * required .hadoop.yarn.PriorityProto applicationPriority = 2; */ public boolean hasApplicationPriority() { return ((bitField0_ & 0x00000002) != 0); } /** * required .hadoop.yarn.PriorityProto applicationPriority = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getApplicationPriority() { return applicationPriority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_; } /** * required .hadoop.yarn.PriorityProto applicationPriority = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder() { return applicationPriority_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasApplicationId()) { memoizedIsInitialized = 0; return false; } if (!hasApplicationPriority()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApplicationId()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getApplicationPriority()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getApplicationId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getApplicationPriority()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto) obj; if (hasApplicationId() != other.hasApplicationId()) return false; if (hasApplicationId()) { if (!getApplicationId() .equals(other.getApplicationId())) return false; } if (hasApplicationPriority() != 
other.hasApplicationPriority()) return false; if (hasApplicationPriority()) { if (!getApplicationPriority() .equals(other.getApplicationPriority())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationId()) { hash = (37 * hash) + APPLICATIONID_FIELD_NUMBER; hash = (53 * hash) + getApplicationId().hashCode(); } if (hasApplicationPriority()) { hash = (37 * hash) + APPLICATIONPRIORITY_FIELD_NUMBER; hash = (53 * hash) + getApplicationPriority().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto 
parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.UpdateApplicationPriorityRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateApplicationPriorityRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationIdFieldBuilder(); getApplicationPriorityFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationIdBuilder_ 
== null) { applicationId_ = null; } else { applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (applicationPriorityBuilder_ == null) { applicationPriority_ = null; } else { applicationPriorityBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (applicationIdBuilder_ == null) { result.applicationId_ = applicationId_; } else { result.applicationId_ = applicationIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { if (applicationPriorityBuilder_ == null) { result.applicationPriority_ = applicationPriority_; } else { result.applicationPriority_ = applicationPriorityBuilder_.build(); } to_bitField0_ |= 0x00000002; } result.bitField0_ = 
to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto.getDefaultInstance()) return this; if (other.hasApplicationId()) { mergeApplicationId(other.getApplicationId()); } if (other.hasApplicationPriority()) { mergeApplicationPriority(other.getApplicationPriority()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasApplicationId()) { return false; } 
// (tail of Builder.isInitialized(), begun on the previous line)
// Both fields are declared `required` in yarn_service_protos.proto, so the
// builder only reports initialized once each has-bit is set.
if (!hasApplicationPriority()) { return false; } return true; }
@java.lang.Override
public Builder mergeFrom(
    org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  // Standard protoc-generated parse-then-merge: parse a complete message from
  // the stream, then fold it into this builder.  On a parse failure the
  // partially-read message is still merged (in the finally block) before the
  // exception is rethrown, so fields read before the error are not lost.
  org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
    parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto) e.getUnfinishedMessage();
    throw e.unwrapIOException();
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
// Has-bits for this builder: 0x00000001 tracks applicationId,
// 0x00000002 tracks applicationPriority.
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
// Dual storage: while applicationIdBuilder_ is null the plain message
// reference above is authoritative; once a field builder is created (lazily,
// in getApplicationIdFieldBuilder()) it owns the value and applicationId_
// is nulled out.
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
/**
 * required .hadoop.yarn.ApplicationIdProto applicationId = 1;
 */
public boolean hasApplicationId() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 * required .hadoop.yarn.ApplicationIdProto applicationId = 1;
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
  // Never returns null: falls back to the default instance when unset.
  if (applicationIdBuilder_ == null) {
    return applicationId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } else { return applicationIdBuilder_.getMessage(); } } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationId_ = value; onChanged(); } else { applicationIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder setApplicationId( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) { if (applicationIdBuilder_ == null) { applicationId_ = builderForValue.build(); onChanged(); } else { applicationIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationId_ != null && applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) { applicationId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.newBuilder(applicationId_).mergeFrom(value).buildPartial(); } else { applicationId_ = value; } onChanged(); } else { applicationIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder clearApplicationId() { if (applicationIdBuilder_ == null) { applicationId_ = null; onChanged(); } else { applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationIdFieldBuilder().getBuilder(); } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { if (applicationIdBuilder_ != null) { return applicationIdBuilder_.getMessageOrBuilder(); } else { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> getApplicationIdFieldBuilder() { if (applicationIdBuilder_ == null) { applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>( getApplicationId(), getParentForChildren(), isClean()); applicationId_ = null; } return applicationIdBuilder_; } private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto applicationPriority_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> applicationPriorityBuilder_; /** * required .hadoop.yarn.PriorityProto applicationPriority = 2; */ public boolean hasApplicationPriority() { return ((bitField0_ & 0x00000002) != 0); } /** * required .hadoop.yarn.PriorityProto applicationPriority = 2; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getApplicationPriority() { if (applicationPriorityBuilder_ == null) { return applicationPriority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_; } else { return applicationPriorityBuilder_.getMessage(); } } /** * required .hadoop.yarn.PriorityProto applicationPriority = 2; */ public Builder setApplicationPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) { if (applicationPriorityBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationPriority_ = value; onChanged(); } else { applicationPriorityBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * required .hadoop.yarn.PriorityProto applicationPriority = 2; */ public Builder setApplicationPriority( org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) { if (applicationPriorityBuilder_ == null) { applicationPriority_ = builderForValue.build(); onChanged(); } else { applicationPriorityBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * required .hadoop.yarn.PriorityProto applicationPriority = 2; */ public Builder mergeApplicationPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) { if (applicationPriorityBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && applicationPriority_ != null && applicationPriority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) { applicationPriority_ = org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.newBuilder(applicationPriority_).mergeFrom(value).buildPartial(); } else { applicationPriority_ = value; } onChanged(); } else { applicationPriorityBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * required .hadoop.yarn.PriorityProto applicationPriority = 2; */ public Builder clearApplicationPriority() { if (applicationPriorityBuilder_ == null) { 
applicationPriority_ = null; onChanged(); } else { applicationPriorityBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * required .hadoop.yarn.PriorityProto applicationPriority = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getApplicationPriorityBuilder() { bitField0_ |= 0x00000002; onChanged(); return getApplicationPriorityFieldBuilder().getBuilder(); } /** * required .hadoop.yarn.PriorityProto applicationPriority = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder() { if (applicationPriorityBuilder_ != null) { return applicationPriorityBuilder_.getMessageOrBuilder(); } else { return applicationPriority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_; } } /** * required .hadoop.yarn.PriorityProto applicationPriority = 2; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> getApplicationPriorityFieldBuilder() { if (applicationPriorityBuilder_ == null) { applicationPriorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>( getApplicationPriority(), getParentForChildren(), isClean()); applicationPriority_ = null; } return applicationPriorityBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return 
super.mergeUnknownFields(unknownFields); }
// @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateApplicationPriorityRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateApplicationPriorityRequestProto)
// Shared immutable singleton returned by getDefaultInstance() and
// getDefaultInstanceForType(); also the prototype from which newBuilder()
// derives builders.
private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto();
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Deprecated by the generator in favor of parser(); kept public for
// compatibility with older call sites.
// NOTE(review): the type parameters on Parser/AbstractParser (and elsewhere
// in this file) appear to have been stripped by HTML extraction of this
// generated source; do not hand-edit — regenerate from
// yarn_service_protos.proto with protoc.
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
  @java.lang.Override
  public UpdateApplicationPriorityRequestProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    // Delegates to the parsing constructor defined earlier in this class.
    return new UpdateApplicationPriorityRequestProto(input, extensionRegistry);
  }
};
public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// Read-only view shared by the UpdateApplicationPriorityResponseProto message
// and its Builder.  Note: applicationPriority is `optional` here, unlike the
// `required` fields of the request message above.
public interface UpdateApplicationPriorityResponseProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateApplicationPriorityResponseProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  /**
   * optional .hadoop.yarn.PriorityProto applicationPriority = 1;
   */
  boolean hasApplicationPriority();
  /**
   * optional .hadoop.yarn.PriorityProto applicationPriority = 1;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto
getApplicationPriority(); /** * optional .hadoop.yarn.PriorityProto applicationPriority = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.UpdateApplicationPriorityResponseProto} */ public static final class UpdateApplicationPriorityResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateApplicationPriorityResponseProto) UpdateApplicationPriorityResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateApplicationPriorityResponseProto.newBuilder() to construct. private UpdateApplicationPriorityResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private UpdateApplicationPriorityResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private UpdateApplicationPriorityResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = applicationPriority_.toBuilder(); } applicationPriority_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.PARSER, extensionRegistry); if (subBuilder != null) { 
subBuilder.mergeFrom(applicationPriority_); applicationPriority_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto.Builder.class); } private int bitField0_; public static final int APPLICATIONPRIORITY_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto applicationPriority_; /** * optional .hadoop.yarn.PriorityProto applicationPriority = 1; */ public boolean hasApplicationPriority() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.PriorityProto applicationPriority = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getApplicationPriority() { return applicationPriority_ == null ? 
// (tail of getApplicationPriority(), begun on the previous line) — returns
// the default instance when the field is unset so callers never see null.
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_; }
/**
 * optional .hadoop.yarn.PriorityProto applicationPriority = 1;
 */
public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder() {
  return applicationPriority_ == null ?
      org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_;
}
// Memoized tri-state: -1 = not yet computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // The only field is `optional`, so this message is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // Field 1 is emitted only when its has-bit is set; unknown fields captured
  // during parsing are re-emitted so messages from newer schema versions
  // round-trip without data loss.
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getApplicationPriority());
  }
  unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Memoized (-1 means "not computed yet"); safe to cache because the
  // message instance is immutable once built.
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
      .computeMessageSize(1, getApplicationPriority());
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto) obj;
  // Field presence must match, present values must be equal, and unknown
  // fields participate in equality as well.
  if (hasApplicationPriority() != other.hasApplicationPriority()) return false;
  if (hasApplicationPriority()) {
    if (!getApplicationPriority()
        .equals(other.getApplicationPriority())) return false;
  }
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationPriority()) { hash = (37 * hash) + APPLICATIONPRIORITY_FIELD_NUMBER; hash = (53 * hash) + getApplicationPriority().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom( byte[] 
data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parseFrom( 
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.UpdateApplicationPriorityResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateApplicationPriorityResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationPriorityFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationPriorityBuilder_ == null) { applicationPriority_ = null; } else { applicationPriorityBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto result 
= new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (applicationPriorityBuilder_ == null) { result.applicationPriority_ = applicationPriority_; } else { result.applicationPriority_ = applicationPriorityBuilder_.build(); } to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto other) { if (other == 
org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto.getDefaultInstance()) return this; if (other.hasApplicationPriority()) { mergeApplicationPriority(other.getApplicationPriority()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto applicationPriority_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> applicationPriorityBuilder_; /** * optional .hadoop.yarn.PriorityProto applicationPriority = 1; */ public boolean hasApplicationPriority() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.PriorityProto applicationPriority = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getApplicationPriority() { if (applicationPriorityBuilder_ == null) { return applicationPriority_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_; } else { return applicationPriorityBuilder_.getMessage(); } } /** * optional .hadoop.yarn.PriorityProto applicationPriority = 1; */ public Builder setApplicationPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) { if (applicationPriorityBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationPriority_ = value; onChanged(); } else { applicationPriorityBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.PriorityProto applicationPriority = 1; */ public Builder setApplicationPriority( org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) { if (applicationPriorityBuilder_ == null) { applicationPriority_ = builderForValue.build(); onChanged(); } else { applicationPriorityBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.PriorityProto applicationPriority = 1; */ public Builder mergeApplicationPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) { if (applicationPriorityBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationPriority_ != null && applicationPriority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) { applicationPriority_ = org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.newBuilder(applicationPriority_).mergeFrom(value).buildPartial(); } else { applicationPriority_ = value; } onChanged(); } else { applicationPriorityBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.PriorityProto applicationPriority = 1; */ public Builder clearApplicationPriority() { if (applicationPriorityBuilder_ == null) { applicationPriority_ = null; onChanged(); } else { applicationPriorityBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional 
.hadoop.yarn.PriorityProto applicationPriority = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getApplicationPriorityBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationPriorityFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.PriorityProto applicationPriority = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getApplicationPriorityOrBuilder() { if (applicationPriorityBuilder_ != null) { return applicationPriorityBuilder_.getMessageOrBuilder(); } else { return applicationPriority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : applicationPriority_; } } /** * optional .hadoop.yarn.PriorityProto applicationPriority = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> getApplicationPriorityFieldBuilder() { if (applicationPriorityBuilder_ == null) { applicationPriorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>( getApplicationPriority(), getParentForChildren(), isClean()); applicationPriority_ = null; } return applicationPriorityBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateApplicationPriorityResponseProto) } // 
@@protoc_insertion_point(class_scope:hadoop.yarn.UpdateApplicationPriorityResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public UpdateApplicationPriorityResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new UpdateApplicationPriorityResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface SignalContainerRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.SignalContainerRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * required .hadoop.yarn.ContainerIdProto container_id = 1; */ boolean hasContainerId(); /** * required .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(); /** * required .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder(); /** 
* required .hadoop.yarn.SignalContainerCommandProto command = 2; */ boolean hasCommand(); /** * required .hadoop.yarn.SignalContainerCommandProto command = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto getCommand(); } /** * Protobuf type {@code hadoop.yarn.SignalContainerRequestProto} */ public static final class SignalContainerRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.SignalContainerRequestProto) SignalContainerRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use SignalContainerRequestProto.newBuilder() to construct. private SignalContainerRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private SignalContainerRequestProto() { command_ = 1; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private SignalContainerRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = containerId_.toBuilder(); } containerId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(containerId_); 
containerId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 16: { int rawValue = input.readEnum(); @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto value = org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(2, rawValue); } else { bitField0_ |= 0x00000002; command_ = rawValue; } break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.Builder.class); } private int bitField0_; public static final int CONTAINER_ID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_; /** * required .hadoop.yarn.ContainerIdProto container_id = 1; */ public boolean hasContainerId() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.ContainerIdProto container_id = 1; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } /** * required .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } public static final int COMMAND_FIELD_NUMBER = 2; private int command_; /** * required .hadoop.yarn.SignalContainerCommandProto command = 2; */ public boolean hasCommand() { return ((bitField0_ & 0x00000002) != 0); } /** * required .hadoop.yarn.SignalContainerCommandProto command = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto getCommand() { @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto result = org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto.valueOf(command_); return result == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto.OUTPUT_THREAD_DUMP : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasContainerId()) { memoizedIsInitialized = 0; return false; } if (!hasCommand()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getContainerId()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeEnum(2, command_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getContainerId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(2, command_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto) obj; if (hasContainerId() != other.hasContainerId()) return false; if (hasContainerId()) { if (!getContainerId() .equals(other.getContainerId())) return false; } if (hasCommand() != other.hasCommand()) return false; if (hasCommand()) { if (command_ != other.command_) return false; } if 
(!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasContainerId()) { hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER; hash = (53 * hash) + getContainerId().hashCode(); } if (hasCommand()) { hash = (37 * hash) + COMMAND_FIELD_NUMBER; hash = (53 * hash) + command_; } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parseFrom( 
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.SignalContainerRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.SignalContainerRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.class, 
org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getContainerIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (containerIdBuilder_ == null) { containerId_ = null; } else { containerIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); command_ = 1; bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 
0x00000001) != 0)) { if (containerIdBuilder_ == null) { result.containerId_ = containerId_; } else { result.containerId_ = containerIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { to_bitField0_ |= 0x00000002; } result.command_ = command_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto.getDefaultInstance()) return this; if (other.hasContainerId()) { mergeContainerId(other.getContainerId()); } if 
(other.hasCommand()) { setCommand(other.getCommand()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasContainerId()) { return false; } if (!hasCommand()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_; /** * required .hadoop.yarn.ContainerIdProto container_id = 1; */ public boolean hasContainerId() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() { if (containerIdBuilder_ == null) { return containerId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } else { return containerIdBuilder_.getMessage(); } } /** * required .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } containerId_ = value; onChanged(); } else { containerIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (containerIdBuilder_ == null) { containerId_ = builderForValue.build(); onChanged(); } else { containerIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && containerId_ != null && containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) { containerId_ = org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.newBuilder(containerId_).mergeFrom(value).buildPartial(); } else { containerId_ = value; } onChanged(); } else { containerIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder clearContainerId() { if (containerIdBuilder_ == null) { containerId_ = null; onChanged(); } else { containerIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * required .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() { bitField0_ |= 0x00000001; 
onChanged(); return getContainerIdFieldBuilder().getBuilder(); } /** * required .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { if (containerIdBuilder_ != null) { return containerIdBuilder_.getMessageOrBuilder(); } else { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } } /** * required .hadoop.yarn.ContainerIdProto container_id = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getContainerIdFieldBuilder() { if (containerIdBuilder_ == null) { containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( getContainerId(), getParentForChildren(), isClean()); containerId_ = null; } return containerIdBuilder_; } private int command_ = 1; /** * required .hadoop.yarn.SignalContainerCommandProto command = 2; */ public boolean hasCommand() { return ((bitField0_ & 0x00000002) != 0); } /** * required .hadoop.yarn.SignalContainerCommandProto command = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto getCommand() { @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto result = org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto.valueOf(command_); return result == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto.OUTPUT_THREAD_DUMP : result; } /** * required .hadoop.yarn.SignalContainerCommandProto command = 2; */ public Builder setCommand(org.apache.hadoop.yarn.proto.YarnProtos.SignalContainerCommandProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; command_ = value.getNumber(); onChanged(); return this; } /** * required .hadoop.yarn.SignalContainerCommandProto command = 2; */ public Builder clearCommand() { bitField0_ = (bitField0_ & ~0x00000002); command_ = 1; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.SignalContainerRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.SignalContainerRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public SignalContainerRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new SignalContainerRequestProto(input, 
extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface SignalContainerResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.SignalContainerResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.SignalContainerResponseProto} */ public static final class SignalContainerResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.SignalContainerResponseProto) SignalContainerResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use SignalContainerResponseProto.newBuilder() to construct. 
// Builder-based and no-arg constructors; the no-arg form backs DEFAULT_INSTANCE.
private SignalContainerResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private SignalContainerResponseProto() { }
@java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; }
// Wire-parsing constructor: this message declares no fields, so every non-zero tag is routed
// to parseUnknownField (tag 0 marks end of input). Unknown fields are built in the finally
// block so unrecognized data still round-trips through serialization.
private SignalContainerResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } }
// Descriptor / reflection plumbing generated for every message type.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerResponseProto_descriptor; }
@java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto.Builder.class); }
// memoizedIsInitialized caches a tri-state result: -1 unknown, 0 false, 1 true.
// With no required fields, initialization always succeeds here.
private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; }
// Serialization: only the unknown-field set contributes bytes and size.
@java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); }
@java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; }
// equals/hashCode: with no declared fields, equality reduces to the unknown fields.
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; }
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; }
// Static parseFrom/parseDelimitedFrom overloads, all delegating to PARSER.
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); }
// Builder factory methods; toBuilder avoids copying when called on the default instance.
@java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ?
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; }
/** * Protobuf type {@code hadoop.yarn.SignalContainerResponseProto} */
// Builder for the field-less response message; most methods simply delegate to the superclass.
public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.SignalContainerResponseProto)
org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto.Builder.class); }
// Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto.newBuilder()
private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_SignalContainerResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto.getDefaultInstance(); }
// build() rejects uninitialized results; buildPartial() never throws.
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; }
// Stream merge: on parse failure the partially-built message is still merged (finally block)
// before the unwrapped IOException is rethrown.
@java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); }
// @@protoc_insertion_point(builder_scope:hadoop.yarn.SignalContainerResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.SignalContainerResponseProto)
// Shared default instance and (deprecated) raw parser; type arguments lost in extraction.
private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public SignalContainerResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new SignalContainerResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// Or-builder view for UpdateApplicationTimeoutsRequestProto:
// required ApplicationIdProto applicationId = 1; repeated ApplicationUpdateTimeoutMapProto application_timeouts = 2.
public interface UpdateApplicationTimeoutsRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateApplicationTimeoutsRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ boolean hasApplicationId(); /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId(); /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder(); /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ java.util.List getApplicationTimeoutsList();
/** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getApplicationTimeouts(int index); /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ int getApplicationTimeoutsCount(); /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ java.util.List getApplicationTimeoutsOrBuilderList(); /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder( int index); }
/** * Protobuf type {@code hadoop.yarn.UpdateApplicationTimeoutsRequestProto} */
// Request carrying an application id plus a list of timeout updates.
// NOTE(review): generic type arguments on List/ArrayList were stripped by the HTML extraction.
public static final class UpdateApplicationTimeoutsRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateApplicationTimeoutsRequestProto)
UpdateApplicationTimeoutsRequestProtoOrBuilder { private static final long serialVersionUID = 0L;
// Use UpdateApplicationTimeoutsRequestProto.newBuilder() to construct.
// Constructors; the no-arg form initializes the repeated field to an immutable empty list.
private UpdateApplicationTimeoutsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private UpdateApplicationTimeoutsRequestProto() { applicationTimeouts_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; }
// Wire-parsing constructor. Tag 10 = field 1 (applicationId): if a value was already read it is
// merged via a sub-builder (singular-message merge semantics). Tag 18 = field 2
// (application_timeouts): each message is appended to a lazily-created ArrayList, which the
// finally block freezes with Collections.unmodifiableList. Unknown tags are preserved.
private UpdateApplicationTimeoutsRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = applicationId_.toBuilder(); } applicationId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(applicationId_); applicationId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) != 0)) { applicationTimeouts_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } applicationTimeouts_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) != 0)) { applicationTimeouts_ = java.util.Collections.unmodifiableList(applicationTimeouts_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } }
// Descriptor / reflection plumbing.
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto.Builder.class); }
// Field 1 (applicationId): has-bit 0x1 in bitField0_; accessors substitute the default
// instance when the field was never set.
private int bitField0_; public static final int APPLICATIONID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { return applicationId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; }
// Field 2 (application_timeouts): simple views over the (immutable) backing list.
public static final int APPLICATION_TIMEOUTS_FIELD_NUMBER = 2; private java.util.List applicationTimeouts_; /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public java.util.List getApplicationTimeoutsList() { return applicationTimeouts_; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public java.util.List getApplicationTimeoutsOrBuilderList() { return applicationTimeouts_; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public int getApplicationTimeoutsCount() { return applicationTimeouts_.size(); } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getApplicationTimeouts(int index) { return applicationTimeouts_.get(index); } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder( int index) { return applicationTimeouts_.get(index); }
// isInitialized: false unless the required applicationId is present (result memoized).
private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasApplicationId()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; }
// Serialization: field 1 only when its has-bit is set, then every repeated element, then
// unknown fields. getSerializedSize mirrors this and memoizes the total.
@java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApplicationId()); } for (int i = 0; i < applicationTimeouts_.size(); i++) { output.writeMessage(2, applicationTimeouts_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getApplicationId()); } for (int i = 0; i < applicationTimeouts_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, applicationTimeouts_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; }
// Value equality over both fields plus unknown fields; hashCode mixes per-field hashes keyed
// by field number and is memoized (0 means "not yet computed").
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto) obj; if (hasApplicationId() != other.hasApplicationId()) return false; if (hasApplicationId()) { if (!getApplicationId() .equals(other.getApplicationId())) return false; } if (!getApplicationTimeoutsList() .equals(other.getApplicationTimeoutsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationId()) { hash = (37 * hash) + APPLICATIONID_FIELD_NUMBER; hash = (53 * hash) + getApplicationId().hashCode(); } if (getApplicationTimeoutsCount() > 0) { hash = (37 * hash) + APPLICATION_TIMEOUTS_FIELD_NUMBER; hash = (53 * hash) + getApplicationTimeoutsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; }
// Static parseFrom/parseDelimitedFrom overloads, all delegating to PARSER.
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); }
// Builder factory methods.
@java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder
toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; }
/** * Protobuf type {@code hadoop.yarn.UpdateApplicationTimeoutsRequestProto} */
// Builder; the repeated field is held either as a plain list (applicationTimeouts_) or, once
// nested builders are requested, inside applicationTimeoutsBuilder_ — exactly one is active.
// (Builder continues beyond this extract.)
public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateApplicationTimeoutsRequestProto)
org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto.Builder.class); }
// Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto.newBuilder()
private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationIdFieldBuilder(); getApplicationTimeoutsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationIdBuilder_ == null) { applicationId_ = null; } else { applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (applicationTimeoutsBuilder_ == null) { applicationTimeouts_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { applicationTimeoutsBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; }
// buildPartial: copies field 1 (from the builder when present) and freezes the repeated list
// (unmodifiableList) when it is builder-managed locally rather than via the field builder.
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (applicationIdBuilder_ == null) { result.applicationId_ = applicationId_; } else { result.applicationId_ = applicationIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } if (applicationTimeoutsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { applicationTimeouts_ = java.util.Collections.unmodifiableList(applicationTimeouts_); bitField0_ = (bitField0_ & ~0x00000002); } result.applicationTimeouts_ = applicationTimeouts_; } else { result.applicationTimeouts_ = applicationTimeoutsBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto)other); } else { super.mergeFrom(other); return this; } }
// Typed merge: field 1 merged via mergeApplicationId; the repeated field is either adopted
// wholesale (when this builder's list is empty) or appended element-by-element; the
// builder-backed branch may also hand the list to a freshly (re)created field builder.
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto.getDefaultInstance()) return this; if (other.hasApplicationId()) { mergeApplicationId(other.getApplicationId()); } if (applicationTimeoutsBuilder_ == null) { if (!other.applicationTimeouts_.isEmpty()) { if (applicationTimeouts_.isEmpty()) { applicationTimeouts_ = other.applicationTimeouts_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.addAll(other.applicationTimeouts_); } onChanged(); } } else { if (!other.applicationTimeouts_.isEmpty()) { if (applicationTimeoutsBuilder_.isEmpty()) { applicationTimeoutsBuilder_.dispose(); applicationTimeoutsBuilder_ = null; applicationTimeouts_ = other.applicationTimeouts_; bitField0_ = (bitField0_ & ~0x00000002); applicationTimeoutsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getApplicationTimeoutsFieldBuilder() : null; } else { applicationTimeoutsBuilder_.addAllMessages(other.applicationTimeouts_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasApplicationId()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; }
// Builder state; declaration of the applicationId field builder continues past this extract.
private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder,
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_; /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { if (applicationIdBuilder_ == null) { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } else { return applicationIdBuilder_.getMessage(); } } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationId_ = value; onChanged(); } else { applicationIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder setApplicationId( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) { if (applicationIdBuilder_ == null) { applicationId_ = builderForValue.build(); onChanged(); } else { applicationIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationId_ != null && applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) { applicationId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.newBuilder(applicationId_).mergeFrom(value).buildPartial(); } else { applicationId_ = value; } onChanged(); } else { 
applicationIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder clearApplicationId() { if (applicationIdBuilder_ == null) { applicationId_ = null; onChanged(); } else { applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationIdFieldBuilder().getBuilder(); } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { if (applicationIdBuilder_ != null) { return applicationIdBuilder_.getMessageOrBuilder(); } else { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } } /** * required .hadoop.yarn.ApplicationIdProto applicationId = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> getApplicationIdFieldBuilder() { if (applicationIdBuilder_ == null) { applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>( getApplicationId(), getParentForChildren(), isClean()); applicationId_ = null; } return applicationIdBuilder_; } private java.util.List applicationTimeouts_ = java.util.Collections.emptyList(); private void ensureApplicationTimeoutsIsMutable() { if (!((bitField0_ & 0x00000002) 
!= 0)) { applicationTimeouts_ = new java.util.ArrayList(applicationTimeouts_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder> applicationTimeoutsBuilder_; /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public java.util.List getApplicationTimeoutsList() { if (applicationTimeoutsBuilder_ == null) { return java.util.Collections.unmodifiableList(applicationTimeouts_); } else { return applicationTimeoutsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public int getApplicationTimeoutsCount() { if (applicationTimeoutsBuilder_ == null) { return applicationTimeouts_.size(); } else { return applicationTimeoutsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getApplicationTimeouts(int index) { if (applicationTimeoutsBuilder_ == null) { return applicationTimeouts_.get(index); } else { return applicationTimeoutsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public Builder setApplicationTimeouts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto value) { if (applicationTimeoutsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.set(index, value); onChanged(); } else { applicationTimeoutsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public Builder 
setApplicationTimeouts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder builderForValue) { if (applicationTimeoutsBuilder_ == null) { ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.set(index, builderForValue.build()); onChanged(); } else { applicationTimeoutsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public Builder addApplicationTimeouts(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto value) { if (applicationTimeoutsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.add(value); onChanged(); } else { applicationTimeoutsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public Builder addApplicationTimeouts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto value) { if (applicationTimeoutsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.add(index, value); onChanged(); } else { applicationTimeoutsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public Builder addApplicationTimeouts( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder builderForValue) { if (applicationTimeoutsBuilder_ == null) { ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.add(builderForValue.build()); onChanged(); } else { applicationTimeoutsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public Builder addApplicationTimeouts( int index, 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder builderForValue) { if (applicationTimeoutsBuilder_ == null) { ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.add(index, builderForValue.build()); onChanged(); } else { applicationTimeoutsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public Builder addAllApplicationTimeouts( java.lang.Iterable values) { if (applicationTimeoutsBuilder_ == null) { ensureApplicationTimeoutsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, applicationTimeouts_); onChanged(); } else { applicationTimeoutsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public Builder clearApplicationTimeouts() { if (applicationTimeoutsBuilder_ == null) { applicationTimeouts_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { applicationTimeoutsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public Builder removeApplicationTimeouts(int index) { if (applicationTimeoutsBuilder_ == null) { ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.remove(index); onChanged(); } else { applicationTimeoutsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder getApplicationTimeoutsBuilder( int index) { return getApplicationTimeoutsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder( int index) { if 
(applicationTimeoutsBuilder_ == null) { return applicationTimeouts_.get(index); } else { return applicationTimeoutsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public java.util.List getApplicationTimeoutsOrBuilderList() { if (applicationTimeoutsBuilder_ != null) { return applicationTimeoutsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(applicationTimeouts_); } } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder addApplicationTimeoutsBuilder() { return getApplicationTimeoutsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder addApplicationTimeoutsBuilder( int index) { return getApplicationTimeoutsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 2; */ public java.util.List getApplicationTimeoutsBuilderList() { return getApplicationTimeoutsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder> getApplicationTimeoutsFieldBuilder() { if (applicationTimeoutsBuilder_ == null) { applicationTimeoutsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder>( applicationTimeouts_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); applicationTimeouts_ = null; } return applicationTimeoutsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateApplicationTimeoutsRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateApplicationTimeoutsRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public UpdateApplicationTimeoutsRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new UpdateApplicationTimeoutsRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } 
// NOTE(review): generated file ("DO NOT EDIT" header) — the substantive fix here is
// restoring the generic type parameters that were stripped during text extraction
// (raw Parser / List types). Prefer regenerating from yarn_service_protos.proto.
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<UpdateApplicationTimeoutsRequestProto> getParserForType() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}
// end of class UpdateApplicationTimeoutsRequestProto

/**
 * Read-only view shared by {@code UpdateApplicationTimeoutsResponseProto} and its
 * {@code Builder}: accessors for the repeated {@code application_timeouts} field.
 */
public interface UpdateApplicationTimeoutsResponseProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.UpdateApplicationTimeoutsResponseProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /**
   * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;
   */
  java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto>
      getApplicationTimeoutsList();
  /**
   * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getApplicationTimeouts(int index);
  /**
   * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;
   */
  int getApplicationTimeoutsCount();
  /**
   * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;
   */
  java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder>
      getApplicationTimeoutsOrBuilderList();
  /**
   * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder(
      int index);
}

/**
 * Protobuf type {@code hadoop.yarn.UpdateApplicationTimeoutsResponseProto}
 */
public static final class UpdateApplicationTimeoutsResponseProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.UpdateApplicationTimeoutsResponseProto)
    UpdateApplicationTimeoutsResponseProtoOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use UpdateApplicationTimeoutsResponseProto.newBuilder() to construct.
private UpdateApplicationTimeoutsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private UpdateApplicationTimeoutsResponseProto() { applicationTimeouts_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private UpdateApplicationTimeoutsResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { applicationTimeouts_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } applicationTimeouts_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { applicationTimeouts_ = java.util.Collections.unmodifiableList(applicationTimeouts_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final 
org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto.Builder.class); } public static final int APPLICATION_TIMEOUTS_FIELD_NUMBER = 1; private java.util.List applicationTimeouts_; /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public java.util.List getApplicationTimeoutsList() { return applicationTimeouts_; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public java.util.List getApplicationTimeoutsOrBuilderList() { return applicationTimeouts_; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public int getApplicationTimeoutsCount() { return applicationTimeouts_.size(); } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getApplicationTimeouts(int index) { return applicationTimeouts_.get(index); } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder( int index) { return applicationTimeouts_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = 
memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < applicationTimeouts_.size(); i++) { output.writeMessage(1, applicationTimeouts_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < applicationTimeouts_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, applicationTimeouts_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto) obj; if (!getApplicationTimeoutsList() .equals(other.getApplicationTimeoutsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getApplicationTimeoutsCount() > 0) { hash = (37 * hash) + APPLICATION_TIMEOUTS_FIELD_NUMBER; hash = (53 * hash) + getApplicationTimeoutsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom( java.nio.ByteBuffer data) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.UpdateApplicationTimeoutsResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.UpdateApplicationTimeoutsResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void 
maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationTimeoutsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationTimeoutsBuilder_ == null) { applicationTimeouts_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { applicationTimeoutsBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto(this); int from_bitField0_ = bitField0_; if (applicationTimeoutsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { applicationTimeouts_ = java.util.Collections.unmodifiableList(applicationTimeouts_); bitField0_ = (bitField0_ & ~0x00000001); } result.applicationTimeouts_ = applicationTimeouts_; } else { result.applicationTimeouts_ = applicationTimeoutsBuilder_.build(); } onBuilt(); return result; } 
// --- Generated GeneratedMessageV3.Builder plumbing. clone() and the
// setField/clearField/clearOneof/setRepeatedField/addRepeatedField overrides
// delegate straight to the superclass; mergeFrom(Message) dispatches on type,
// and mergeFrom(UpdateApplicationTimeoutsResponseProto) merges the repeated
// application_timeouts field via either the plain list or the
// RepeatedFieldBuilderV3, depending on which representation is active. ---
@java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto.getDefaultInstance()) return this; if (applicationTimeoutsBuilder_ == null) { if (!other.applicationTimeouts_.isEmpty()) { if (applicationTimeouts_.isEmpty()) { applicationTimeouts_ = other.applicationTimeouts_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.addAll(other.applicationTimeouts_); } onChanged(); } } else { if (!other.applicationTimeouts_.isEmpty()) { 
// Field-builder path: when our builder holds no messages yet, dispose it and
// adopt the other message's list wholesale; otherwise append the other's entries.
if (applicationTimeoutsBuilder_.isEmpty()) { applicationTimeoutsBuilder_.dispose(); applicationTimeoutsBuilder_ = null; applicationTimeouts_ = other.applicationTimeouts_; bitField0_ = (bitField0_ & ~0x00000001); applicationTimeoutsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getApplicationTimeoutsFieldBuilder() : null; } else { applicationTimeoutsBuilder_.addAllMessages(other.applicationTimeouts_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List applicationTimeouts_ = java.util.Collections.emptyList(); private void ensureApplicationTimeoutsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { applicationTimeouts_ = new java.util.ArrayList(applicationTimeouts_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder> applicationTimeoutsBuilder_; /** * repeated 
.hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public java.util.List getApplicationTimeoutsList() { if (applicationTimeoutsBuilder_ == null) { return java.util.Collections.unmodifiableList(applicationTimeouts_); } else { return applicationTimeoutsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public int getApplicationTimeoutsCount() { if (applicationTimeoutsBuilder_ == null) { return applicationTimeouts_.size(); } else { return applicationTimeoutsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getApplicationTimeouts(int index) { if (applicationTimeoutsBuilder_ == null) { return applicationTimeouts_.get(index); } else { return applicationTimeoutsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public Builder setApplicationTimeouts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto value) { if (applicationTimeoutsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.set(index, value); onChanged(); } else { applicationTimeoutsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public Builder setApplicationTimeouts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder builderForValue) { if (applicationTimeoutsBuilder_ == null) { ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.set(index, builderForValue.build()); onChanged(); } else { applicationTimeoutsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts 
= 1; */ public Builder addApplicationTimeouts(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto value) { if (applicationTimeoutsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.add(value); onChanged(); } else { applicationTimeoutsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public Builder addApplicationTimeouts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto value) { if (applicationTimeoutsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.add(index, value); onChanged(); } else { applicationTimeoutsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public Builder addApplicationTimeouts( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder builderForValue) { if (applicationTimeoutsBuilder_ == null) { ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.add(builderForValue.build()); onChanged(); } else { applicationTimeoutsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public Builder addApplicationTimeouts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder builderForValue) { if (applicationTimeoutsBuilder_ == null) { ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.add(index, builderForValue.build()); onChanged(); } else { applicationTimeoutsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public Builder addAllApplicationTimeouts( java.lang.Iterable values) { if 
(applicationTimeoutsBuilder_ == null) { ensureApplicationTimeoutsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, applicationTimeouts_); onChanged(); } else { applicationTimeoutsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public Builder clearApplicationTimeouts() { if (applicationTimeoutsBuilder_ == null) { applicationTimeouts_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { applicationTimeoutsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public Builder removeApplicationTimeouts(int index) { if (applicationTimeoutsBuilder_ == null) { ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.remove(index); onChanged(); } else { applicationTimeoutsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder getApplicationTimeoutsBuilder( int index) { return getApplicationTimeoutsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder( int index) { if (applicationTimeoutsBuilder_ == null) { return applicationTimeouts_.get(index); } else { return applicationTimeoutsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public java.util.List getApplicationTimeoutsOrBuilderList() { if (applicationTimeoutsBuilder_ != null) { return applicationTimeoutsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(applicationTimeouts_); } } /** * repeated 
.hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder addApplicationTimeoutsBuilder() { return getApplicationTimeoutsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder addApplicationTimeoutsBuilder( int index) { return getApplicationTimeoutsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ApplicationUpdateTimeoutMapProto application_timeouts = 1; */ public java.util.List getApplicationTimeoutsBuilderList() { return getApplicationTimeoutsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder> getApplicationTimeoutsFieldBuilder() { if (applicationTimeoutsBuilder_ == null) { applicationTimeoutsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder>( applicationTimeouts_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); applicationTimeouts_ = null; } return applicationTimeoutsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return 
super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.UpdateApplicationTimeoutsResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.UpdateApplicationTimeoutsResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public UpdateApplicationTimeoutsResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new UpdateApplicationTimeoutsResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetAllResourceProfilesRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetAllResourceProfilesRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code 
hadoop.yarn.GetAllResourceProfilesRequestProto} */ public static final class GetAllResourceProfilesRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetAllResourceProfilesRequestProto) GetAllResourceProfilesRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetAllResourceProfilesRequestProto.newBuilder() to construct. private GetAllResourceProfilesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetAllResourceProfilesRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetAllResourceProfilesRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } 
// --- Generated static parseFrom(...) overloads for every supported input
// (ByteBuffer, ByteString, byte[], InputStream, delimited InputStream,
// CodedInputStream), each with and without an ExtensionRegistryLite; all
// delegate to PARSER or the GeneratedMessageV3 parse helpers. This message
// declares no fields, so parsing only collects unknown fields. ---
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public 
// --- Builder factories and the nested Builder for this field-less request
// message; the Builder only carries GeneratedMessageV3.Builder delegation. ---
static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetAllResourceProfilesRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetAllResourceProfilesRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return 
super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetAllResourceProfilesRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetAllResourceProfilesRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetAllResourceProfilesRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetAllResourceProfilesRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetAllResourceProfilesResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetAllResourceProfilesResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1; */ boolean 
hasResourceProfiles(); /** * required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getResourceProfiles(); /** * required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder getResourceProfilesOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.GetAllResourceProfilesResponseProto} */ public static final class GetAllResourceProfilesResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetAllResourceProfilesResponseProto) GetAllResourceProfilesResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetAllResourceProfilesResponseProto.newBuilder() to construct. private GetAllResourceProfilesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetAllResourceProfilesResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetAllResourceProfilesResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = resourceProfiles_.toBuilder(); } 
// ---------------------------------------------------------------------------
// NOTE(review): protoc-GENERATED code (file header: "DO NOT EDIT!", source:
// yarn_service_protos.proto). Do not hand-modify — regenerate from the .proto
// instead; manual edits here will be lost and risk wire-format drift.
// This span covers:
//   * the tail of GetAllResourceProfilesResponseProto: end of its parsing
//     constructor, accessors for its single field
//     (required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1,
//     enforced by the isInitialized()/writeTo() checks below), equals/hashCode,
//     the standard parseFrom overloads, its Builder, and class-scope statics;
//   * the GetResourceProfileRequestProtoOrBuilder interface; and
//   * the start of GetResourceProfileRequestProto (required string profile = 1).
// NOTE(review): the line-wrapping in this copy appears mangled by extraction
// (lines break mid-expression); the code lines below are kept byte-for-byte.
// ---------------------------------------------------------------------------
resourceProfiles_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(resourceProfiles_); resourceProfiles_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto.Builder.class); } private int bitField0_; public static final int RESOURCE_PROFILES_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto resourceProfiles_; /** * required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1; */ public boolean hasResourceProfiles() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getResourceProfiles() { return resourceProfiles_ 
== null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance() : resourceProfiles_; } /** * required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder getResourceProfilesOrBuilder() { return resourceProfiles_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance() : resourceProfiles_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasResourceProfiles()) { memoizedIsInitialized = 0; return false; } if (!getResourceProfiles().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getResourceProfiles()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getResourceProfiles()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto) obj; if (hasResourceProfiles() != other.hasResourceProfiles()) return false; if (hasResourceProfiles()) { if 
(!getResourceProfiles() .equals(other.getResourceProfiles())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasResourceProfiles()) { hash = (37 * hash) + RESOURCE_PROFILES_FIELD_NUMBER; hash = (53 * hash) + getResourceProfiles().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return 
// NOTE(review): remaining standard generated parseFrom/parseDelimitedFrom
// overloads and the GetAllResourceProfilesResponseProto.Builder follow.
PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetAllResourceProfilesResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetAllResourceProfilesResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getResourceProfilesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (resourceProfilesBuilder_ == null) { resourceProfiles_ = null; } else { resourceProfilesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto result = new 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (resourceProfilesBuilder_ == null) { result.resourceProfiles_ = resourceProfiles_; } else { result.resourceProfiles_ = resourceProfilesBuilder_.build(); } to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto other) { if (other == 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto.getDefaultInstance()) return this; if (other.hasResourceProfiles()) { mergeResourceProfiles(other.getResourceProfiles()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasResourceProfiles()) { return false; } if (!getResourceProfiles().isInitialized()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto resourceProfiles_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder> resourceProfilesBuilder_; /** * required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1; */ public boolean hasResourceProfiles() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getResourceProfiles() { if (resourceProfilesBuilder_ == null) { return resourceProfiles_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance() : resourceProfiles_; } else { return resourceProfilesBuilder_.getMessage(); } } /** * required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1; */ public Builder setResourceProfiles(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto value) { if (resourceProfilesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } resourceProfiles_ = value; onChanged(); } else { resourceProfilesBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1; */ public Builder setResourceProfiles( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder builderForValue) { if (resourceProfilesBuilder_ == null) { resourceProfiles_ = builderForValue.build(); onChanged(); } else { resourceProfilesBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1; */ public Builder mergeResourceProfiles(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto value) { if (resourceProfilesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && resourceProfiles_ != null && resourceProfiles_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance()) { resourceProfiles_ = org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.newBuilder(resourceProfiles_).mergeFrom(value).buildPartial(); } else { resourceProfiles_ = value; } onChanged(); } else { resourceProfilesBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1; */ public Builder clearResourceProfiles() { if (resourceProfilesBuilder_ == null) { resourceProfiles_ = null; onChanged(); } else { resourceProfilesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * required 
.hadoop.yarn.ResourceProfilesProto resource_profiles = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder getResourceProfilesBuilder() { bitField0_ |= 0x00000001; onChanged(); return getResourceProfilesFieldBuilder().getBuilder(); } /** * required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder getResourceProfilesOrBuilder() { if (resourceProfilesBuilder_ != null) { return resourceProfilesBuilder_.getMessageOrBuilder(); } else { return resourceProfiles_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance() : resourceProfiles_; } } /** * required .hadoop.yarn.ResourceProfilesProto resource_profiles = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder> getResourceProfilesFieldBuilder() { if (resourceProfilesBuilder_ == null) { resourceProfilesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder>( getResourceProfiles(), getParentForChildren(), isClean()); resourceProfiles_ = null; } return resourceProfilesBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:hadoop.yarn.GetAllResourceProfilesResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetAllResourceProfilesResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetAllResourceProfilesResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetAllResourceProfilesResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetResourceProfileRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetResourceProfileRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * required string profile = 1; */ boolean hasProfile(); /** * required string profile = 1; */ java.lang.String getProfile(); /** * required string profile = 1; */ org.apache.hadoop.thirdparty.protobuf.ByteString getProfileBytes(); } /** * Protobuf type {@code hadoop.yarn.GetResourceProfileRequestProto} */ public 
// NOTE(review): generated request message GetResourceProfileRequestProto
// begins here; it carries a single required string 'profile' (field 1), per
// the OrBuilder interface above and the hasProfile()/isInitialized() checks.
static final class GetResourceProfileRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetResourceProfileRequestProto) GetResourceProfileRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetResourceProfileRequestProto.newBuilder() to construct. private GetResourceProfileRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetResourceProfileRequestProto() { profile_ = ""; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetResourceProfileRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; profile_ = bs; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final 
org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto.Builder.class); } private int bitField0_; public static final int PROFILE_FIELD_NUMBER = 1; private volatile java.lang.Object profile_; /** * required string profile = 1; */ public boolean hasProfile() { return ((bitField0_ & 0x00000001) != 0); } /** * required string profile = 1; */ public java.lang.String getProfile() { java.lang.Object ref = profile_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { profile_ = s; } return s; } } /** * required string profile = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getProfileBytes() { java.lang.Object ref = profile_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); profile_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasProfile()) { 
memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, profile_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, profile_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto) obj; if (hasProfile() != other.hasProfile()) return false; if (hasProfile()) { if (!getProfile() .equals(other.getProfile())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasProfile()) { hash = (37 * hash) + PROFILE_FIELD_NUMBER; hash = (53 * hash) + getProfile().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetResourceProfileRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetResourceProfileRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); profile_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public 
org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.profile_ = profile_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, 
java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto.getDefaultInstance()) return this; if (other.hasProfile()) { bitField0_ |= 0x00000001; profile_ = other.profile_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasProfile()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object profile_ = ""; /** * required string profile = 1; */ public boolean hasProfile() { return 
((bitField0_ & 0x00000001) != 0); } /** * required string profile = 1; */ public java.lang.String getProfile() { java.lang.Object ref = profile_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { profile_ = s; } return s; } else { return (java.lang.String) ref; } } /** * required string profile = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getProfileBytes() { java.lang.Object ref = profile_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); profile_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * required string profile = 1; */ public Builder setProfile( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; profile_ = value; onChanged(); return this; } /** * required string profile = 1; */ public Builder clearProfile() { bitField0_ = (bitField0_ & ~0x00000001); profile_ = getDefaultInstance().getProfile(); onChanged(); return this; } /** * required string profile = 1; */ public Builder setProfileBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; profile_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetResourceProfileRequestProto) } // 
@@protoc_insertion_point(class_scope:hadoop.yarn.GetResourceProfileRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetResourceProfileRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetResourceProfileRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetResourceProfileResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetResourceProfileResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * required .hadoop.yarn.ResourceProto resources = 1; */ boolean hasResources(); /** * required .hadoop.yarn.ResourceProto resources = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources(); /** * required .hadoop.yarn.ResourceProto resources = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.GetResourceProfileResponseProto} */ 
public static final class GetResourceProfileResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetResourceProfileResponseProto) GetResourceProfileResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetResourceProfileResponseProto.newBuilder() to construct. private GetResourceProfileResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetResourceProfileResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetResourceProfileResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = resources_.toBuilder(); } resources_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(resources_); resources_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch 
(java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto.Builder.class); } private int bitField0_; public static final int RESOURCES_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resources_; /** * required .hadoop.yarn.ResourceProto resources = 1; */ public boolean hasResources() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.ResourceProto resources = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources() { return resources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_; } /** * required .hadoop.yarn.ResourceProto resources = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder() { return resources_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasResources()) { memoizedIsInitialized = 0; return false; } if (!getResources().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getResources()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getResources()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto) obj; if (hasResources() != other.hasResources()) return false; if (hasResources()) { if (!getResources() .equals(other.getResources())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasResources()) { hash = (37 * hash) + RESOURCES_FIELD_NUMBER; hash = (53 * hash) + 
getResources().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto 
parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetResourceProfileResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetResourceProfileResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void 
maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getResourcesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (resourcesBuilder_ == null) { resources_ = null; } else { resourcesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetResourceProfileResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (resourcesBuilder_ == null) { result.resources_ = resources_; } else { result.resources_ = resourcesBuilder_.build(); } to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { 
return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto.getDefaultInstance()) return this; if (other.hasResources()) { mergeResources(other.getResources()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasResources()) { return false; } if (!getResources().isInitialized()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto parsedMessage = null; try { 
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resources_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourcesBuilder_; /** * required .hadoop.yarn.ResourceProto resources = 1; */ public boolean hasResources() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.ResourceProto resources = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources() { if (resourcesBuilder_ == null) { return resources_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_; } else { return resourcesBuilder_.getMessage(); } } /** * required .hadoop.yarn.ResourceProto resources = 1; */ public Builder setResources(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (resourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } resources_ = value; onChanged(); } else { resourcesBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ResourceProto resources = 1; */ public Builder setResources( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) { if (resourcesBuilder_ == null) { resources_ = builderForValue.build(); onChanged(); } else { resourcesBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ResourceProto resources = 1; */ public Builder mergeResources(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (resourcesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && resources_ != null && resources_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) { resources_ = org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.newBuilder(resources_).mergeFrom(value).buildPartial(); } else { resources_ = value; } onChanged(); } else { resourcesBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * required .hadoop.yarn.ResourceProto resources = 1; */ public Builder clearResources() { if (resourcesBuilder_ == null) { resources_ = null; onChanged(); } else { resourcesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * required .hadoop.yarn.ResourceProto resources = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourcesBuilder() { bitField0_ |= 0x00000001; onChanged(); return getResourcesFieldBuilder().getBuilder(); } /** * required .hadoop.yarn.ResourceProto 
resources = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder() { if (resourcesBuilder_ != null) { return resourcesBuilder_.getMessageOrBuilder(); } else { return resources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_; } } /** * required .hadoop.yarn.ResourceProto resources = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getResourcesFieldBuilder() { if (resourcesBuilder_ == null) { resourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>( getResources(), getParentForChildren(), isClean()); resources_ = null; } return resourcesBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetResourceProfileResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetResourceProfileResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } 
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetResourceProfileResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetResourceProfileResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetAllResourceTypeInfoRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetAllResourceTypeInfoRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.GetAllResourceTypeInfoRequestProto} */ public static final class GetAllResourceTypeInfoRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetAllResourceTypeInfoRequestProto) GetAllResourceTypeInfoRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetAllResourceTypeInfoRequestProto.newBuilder() to construct. 
private GetAllResourceTypeInfoRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetAllResourceTypeInfoRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetAllResourceTypeInfoRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite 
extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetAllResourceTypeInfoRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetAllResourceTypeInfoRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor 
getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object 
value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetAllResourceTypeInfoRequestProto) } // 
@@protoc_insertion_point(class_scope:hadoop.yarn.GetAllResourceTypeInfoRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetAllResourceTypeInfoRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetAllResourceTypeInfoRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetAllResourceTypeInfoResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetAllResourceTypeInfoResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ java.util.List getResourceTypeInfoList(); /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getResourceTypeInfo(int index); /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ int getResourceTypeInfoCount(); /** * 
repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ java.util.List getResourceTypeInfoOrBuilderList(); /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder getResourceTypeInfoOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.GetAllResourceTypeInfoResponseProto} */ public static final class GetAllResourceTypeInfoResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetAllResourceTypeInfoResponseProto) GetAllResourceTypeInfoResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetAllResourceTypeInfoResponseProto.newBuilder() to construct. private GetAllResourceTypeInfoResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetAllResourceTypeInfoResponseProto() { resourceTypeInfo_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetAllResourceTypeInfoResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { resourceTypeInfo_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } resourceTypeInfo_.add( 
input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { resourceTypeInfo_ = java.util.Collections.unmodifiableList(resourceTypeInfo_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto.Builder.class); } public static final int RESOURCE_TYPE_INFO_FIELD_NUMBER = 1; private java.util.List resourceTypeInfo_; /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public java.util.List getResourceTypeInfoList() { return resourceTypeInfo_; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public java.util.List getResourceTypeInfoOrBuilderList() { return resourceTypeInfo_; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public int getResourceTypeInfoCount() { return 
resourceTypeInfo_.size(); } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getResourceTypeInfo(int index) { return resourceTypeInfo_.get(index); } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder getResourceTypeInfoOrBuilder( int index) { return resourceTypeInfo_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getResourceTypeInfoCount(); i++) { if (!getResourceTypeInfo(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < resourceTypeInfo_.size(); i++) { output.writeMessage(1, resourceTypeInfo_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < resourceTypeInfo_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, resourceTypeInfo_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto) obj; if (!getResourceTypeInfoList() 
.equals(other.getResourceTypeInfoList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getResourceTypeInfoCount() > 0) { hash = (37 * hash) + RESOURCE_TYPE_INFO_FIELD_NUMBER; hash = (53 * hash) + getResourceTypeInfoList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); 
} public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetAllResourceTypeInfoResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetAllResourceTypeInfoResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getResourceTypeInfoFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (resourceTypeInfoBuilder_ == null) { resourceTypeInfo_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { resourceTypeInfoBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto result = new 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto(this); int from_bitField0_ = bitField0_; if (resourceTypeInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { resourceTypeInfo_ = java.util.Collections.unmodifiableList(resourceTypeInfo_); bitField0_ = (bitField0_ & ~0x00000001); } result.resourceTypeInfo_ = resourceTypeInfo_; } else { result.resourceTypeInfo_ = resourceTypeInfoBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto other) { if (other == 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto.getDefaultInstance()) return this; if (resourceTypeInfoBuilder_ == null) { if (!other.resourceTypeInfo_.isEmpty()) { if (resourceTypeInfo_.isEmpty()) { resourceTypeInfo_ = other.resourceTypeInfo_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureResourceTypeInfoIsMutable(); resourceTypeInfo_.addAll(other.resourceTypeInfo_); } onChanged(); } } else { if (!other.resourceTypeInfo_.isEmpty()) { if (resourceTypeInfoBuilder_.isEmpty()) { resourceTypeInfoBuilder_.dispose(); resourceTypeInfoBuilder_ = null; resourceTypeInfo_ = other.resourceTypeInfo_; bitField0_ = (bitField0_ & ~0x00000001); resourceTypeInfoBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getResourceTypeInfoFieldBuilder() : null; } else { resourceTypeInfoBuilder_.addAllMessages(other.resourceTypeInfo_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getResourceTypeInfoCount(); i++) { if (!getResourceTypeInfo(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List resourceTypeInfo_ = 
java.util.Collections.emptyList(); private void ensureResourceTypeInfoIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { resourceTypeInfo_ = new java.util.ArrayList(resourceTypeInfo_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder> resourceTypeInfoBuilder_; /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public java.util.List getResourceTypeInfoList() { if (resourceTypeInfoBuilder_ == null) { return java.util.Collections.unmodifiableList(resourceTypeInfo_); } else { return resourceTypeInfoBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public int getResourceTypeInfoCount() { if (resourceTypeInfoBuilder_ == null) { return resourceTypeInfo_.size(); } else { return resourceTypeInfoBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getResourceTypeInfo(int index) { if (resourceTypeInfoBuilder_ == null) { return resourceTypeInfo_.get(index); } else { return resourceTypeInfoBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public Builder setResourceTypeInfo( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto value) { if (resourceTypeInfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResourceTypeInfoIsMutable(); resourceTypeInfo_.set(index, value); onChanged(); } else { resourceTypeInfoBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public Builder setResourceTypeInfo( int index, 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder builderForValue) { if (resourceTypeInfoBuilder_ == null) { ensureResourceTypeInfoIsMutable(); resourceTypeInfo_.set(index, builderForValue.build()); onChanged(); } else { resourceTypeInfoBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public Builder addResourceTypeInfo(org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto value) { if (resourceTypeInfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResourceTypeInfoIsMutable(); resourceTypeInfo_.add(value); onChanged(); } else { resourceTypeInfoBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public Builder addResourceTypeInfo( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto value) { if (resourceTypeInfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResourceTypeInfoIsMutable(); resourceTypeInfo_.add(index, value); onChanged(); } else { resourceTypeInfoBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public Builder addResourceTypeInfo( org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder builderForValue) { if (resourceTypeInfoBuilder_ == null) { ensureResourceTypeInfoIsMutable(); resourceTypeInfo_.add(builderForValue.build()); onChanged(); } else { resourceTypeInfoBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public Builder addResourceTypeInfo( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder builderForValue) { if (resourceTypeInfoBuilder_ == null) { ensureResourceTypeInfoIsMutable(); resourceTypeInfo_.add(index, builderForValue.build()); onChanged(); } else { 
resourceTypeInfoBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public Builder addAllResourceTypeInfo( java.lang.Iterable values) { if (resourceTypeInfoBuilder_ == null) { ensureResourceTypeInfoIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, resourceTypeInfo_); onChanged(); } else { resourceTypeInfoBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public Builder clearResourceTypeInfo() { if (resourceTypeInfoBuilder_ == null) { resourceTypeInfo_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { resourceTypeInfoBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public Builder removeResourceTypeInfo(int index) { if (resourceTypeInfoBuilder_ == null) { ensureResourceTypeInfoIsMutable(); resourceTypeInfo_.remove(index); onChanged(); } else { resourceTypeInfoBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder getResourceTypeInfoBuilder( int index) { return getResourceTypeInfoFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder getResourceTypeInfoOrBuilder( int index) { if (resourceTypeInfoBuilder_ == null) { return resourceTypeInfo_.get(index); } else { return resourceTypeInfoBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public java.util.List getResourceTypeInfoOrBuilderList() { if (resourceTypeInfoBuilder_ != null) { return resourceTypeInfoBuilder_.getMessageOrBuilderList(); } else { return 
java.util.Collections.unmodifiableList(resourceTypeInfo_); } } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder addResourceTypeInfoBuilder() { return getResourceTypeInfoFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder addResourceTypeInfoBuilder( int index) { return getResourceTypeInfoFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ResourceTypeInfoProto resource_type_info = 1; */ public java.util.List getResourceTypeInfoBuilderList() { return getResourceTypeInfoFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder> getResourceTypeInfoFieldBuilder() { if (resourceTypeInfoBuilder_ == null) { resourceTypeInfoBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder>( resourceTypeInfo_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); resourceTypeInfo_ = null; } return resourceTypeInfoBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final 
org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetAllResourceTypeInfoResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetAllResourceTypeInfoResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetAllResourceTypeInfoResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetAllResourceTypeInfoResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface StartContainerRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.StartContainerRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1; */ boolean hasContainerLaunchContext(); /** * optional .hadoop.yarn.ContainerLaunchContextProto 
container_launch_context = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getContainerLaunchContext(); /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getContainerLaunchContextOrBuilder(); /** * optional .hadoop.common.TokenProto container_token = 2; */ boolean hasContainerToken(); /** * optional .hadoop.common.TokenProto container_token = 2; */ org.apache.hadoop.security.proto.SecurityProtos.TokenProto getContainerToken(); /** * optional .hadoop.common.TokenProto container_token = 2; */ org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getContainerTokenOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.StartContainerRequestProto} */ public static final class StartContainerRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.StartContainerRequestProto) StartContainerRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use StartContainerRequestProto.newBuilder() to construct. 
private StartContainerRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private StartContainerRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private StartContainerRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = containerLaunchContext_.toBuilder(); } containerLaunchContext_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(containerLaunchContext_); containerLaunchContext_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder subBuilder = null; if (((bitField0_ & 0x00000002) != 0)) { subBuilder = containerToken_.toBuilder(); } containerToken_ = input.readMessage(org.apache.hadoop.security.proto.SecurityProtos.TokenProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(containerToken_); containerToken_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } 
} } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder.class); } private int bitField0_; public static final int CONTAINER_LAUNCH_CONTEXT_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto containerLaunchContext_; /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1; */ public boolean hasContainerLaunchContext() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getContainerLaunchContext() { return containerLaunchContext_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : containerLaunchContext_; } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getContainerLaunchContextOrBuilder() { return containerLaunchContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : containerLaunchContext_; } public static final int CONTAINER_TOKEN_FIELD_NUMBER = 2; private org.apache.hadoop.security.proto.SecurityProtos.TokenProto containerToken_; /** * optional .hadoop.common.TokenProto container_token = 2; */ public boolean hasContainerToken() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.common.TokenProto container_token = 2; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getContainerToken() { return containerToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : containerToken_; } /** * optional .hadoop.common.TokenProto container_token = 2; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getContainerTokenOrBuilder() { return containerToken_ == null ? 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : containerToken_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasContainerToken()) { if (!getContainerToken().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getContainerLaunchContext()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getContainerToken()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getContainerLaunchContext()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getContainerToken()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto) obj; if (hasContainerLaunchContext() != other.hasContainerLaunchContext()) return false; if (hasContainerLaunchContext()) { if (!getContainerLaunchContext() .equals(other.getContainerLaunchContext())) return false; } if (hasContainerToken() != other.hasContainerToken()) 
return false; if (hasContainerToken()) { if (!getContainerToken() .equals(other.getContainerToken())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasContainerLaunchContext()) { hash = (37 * hash) + CONTAINER_LAUNCH_CONTEXT_FIELD_NUMBER; hash = (53 * hash) + getContainerLaunchContext().hashCode(); } if (hasContainerToken()) { hash = (37 * hash) + CONTAINER_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getContainerToken().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.StartContainerRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.StartContainerRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getContainerLaunchContextFieldBuilder(); getContainerTokenFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (containerLaunchContextBuilder_ == null) { containerLaunchContext_ = null; } else { containerLaunchContextBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (containerTokenBuilder_ == null) { containerToken_ = null; } else { containerTokenBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto result = buildPartial(); if (!result.isInitialized()) { throw 
newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (containerLaunchContextBuilder_ == null) { result.containerLaunchContext_ = containerLaunchContext_; } else { result.containerLaunchContext_ = containerLaunchContextBuilder_.build(); } to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { if (containerTokenBuilder_ == null) { result.containerToken_ = containerToken_; } else { result.containerToken_ = containerTokenBuilder_.build(); } to_bitField0_ |= 0x00000002; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.getDefaultInstance()) return this; if (other.hasContainerLaunchContext()) { mergeContainerLaunchContext(other.getContainerLaunchContext()); } if (other.hasContainerToken()) { mergeContainerToken(other.getContainerToken()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasContainerToken()) { if (!getContainerToken().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto containerLaunchContext_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder> containerLaunchContextBuilder_; /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1; */ public boolean hasContainerLaunchContext() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getContainerLaunchContext() { if (containerLaunchContextBuilder_ == null) { return containerLaunchContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : containerLaunchContext_; } else { return containerLaunchContextBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1; */ public Builder setContainerLaunchContext(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto value) { if (containerLaunchContextBuilder_ == null) { if (value == null) { throw new NullPointerException(); } containerLaunchContext_ = value; onChanged(); } else { containerLaunchContextBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1; */ public Builder setContainerLaunchContext( org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder builderForValue) { if (containerLaunchContextBuilder_ == null) { containerLaunchContext_ = builderForValue.build(); onChanged(); } else { containerLaunchContextBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1; */ public Builder mergeContainerLaunchContext(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto value) { if (containerLaunchContextBuilder_ == null) { if 
(((bitField0_ & 0x00000001) != 0) && containerLaunchContext_ != null && containerLaunchContext_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance()) { containerLaunchContext_ = org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.newBuilder(containerLaunchContext_).mergeFrom(value).buildPartial(); } else { containerLaunchContext_ = value; } onChanged(); } else { containerLaunchContextBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1; */ public Builder clearContainerLaunchContext() { if (containerLaunchContextBuilder_ == null) { containerLaunchContext_ = null; onChanged(); } else { containerLaunchContextBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder getContainerLaunchContextBuilder() { bitField0_ |= 0x00000001; onChanged(); return getContainerLaunchContextFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getContainerLaunchContextOrBuilder() { if (containerLaunchContextBuilder_ != null) { return containerLaunchContextBuilder_.getMessageOrBuilder(); } else { return containerLaunchContext_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : containerLaunchContext_; } } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder> getContainerLaunchContextFieldBuilder() { if (containerLaunchContextBuilder_ == null) { containerLaunchContextBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder>( getContainerLaunchContext(), getParentForChildren(), isClean()); containerLaunchContext_ = null; } return containerLaunchContextBuilder_; } private org.apache.hadoop.security.proto.SecurityProtos.TokenProto containerToken_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> containerTokenBuilder_; /** * optional .hadoop.common.TokenProto container_token = 2; */ public boolean hasContainerToken() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.common.TokenProto container_token = 2; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getContainerToken() { if (containerTokenBuilder_ == null) { return containerToken_ == null ? 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : containerToken_; } else { return containerTokenBuilder_.getMessage(); } } /** * optional .hadoop.common.TokenProto container_token = 2; */ public Builder setContainerToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (containerTokenBuilder_ == null) { if (value == null) { throw new NullPointerException(); } containerToken_ = value; onChanged(); } else { containerTokenBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.common.TokenProto container_token = 2; */ public Builder setContainerToken( org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) { if (containerTokenBuilder_ == null) { containerToken_ = builderForValue.build(); onChanged(); } else { containerTokenBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.common.TokenProto container_token = 2; */ public Builder mergeContainerToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (containerTokenBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && containerToken_ != null && containerToken_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) { containerToken_ = org.apache.hadoop.security.proto.SecurityProtos.TokenProto.newBuilder(containerToken_).mergeFrom(value).buildPartial(); } else { containerToken_ = value; } onChanged(); } else { containerTokenBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.common.TokenProto container_token = 2; */ public Builder clearContainerToken() { if (containerTokenBuilder_ == null) { containerToken_ = null; onChanged(); } else { containerTokenBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * optional .hadoop.common.TokenProto container_token = 2; */ public 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getContainerTokenBuilder() { bitField0_ |= 0x00000002; onChanged(); return getContainerTokenFieldBuilder().getBuilder(); } /** * optional .hadoop.common.TokenProto container_token = 2; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getContainerTokenOrBuilder() { if (containerTokenBuilder_ != null) { return containerTokenBuilder_.getMessageOrBuilder(); } else { return containerToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : containerToken_; } } /** * optional .hadoop.common.TokenProto container_token = 2; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> getContainerTokenFieldBuilder() { if (containerTokenBuilder_ == null) { containerTokenBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>( getContainerToken(), getParentForChildren(), isClean()); containerToken_ = null; } return containerTokenBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.StartContainerRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.StartContainerRequestProto) private static final 
org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public StartContainerRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new StartContainerRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface StartContainerResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.StartContainerResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ java.util.List getServicesMetaDataList(); /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServicesMetaData(int index); /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ int getServicesMetaDataCount(); /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ java.util.List getServicesMetaDataOrBuilderList(); /** * repeated .hadoop.yarn.StringBytesMapProto 
services_meta_data = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServicesMetaDataOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.StartContainerResponseProto} */ public static final class StartContainerResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.StartContainerResponseProto) StartContainerResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use StartContainerResponseProto.newBuilder() to construct. private StartContainerResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private StartContainerResponseProto() { servicesMetaData_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private StartContainerResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { servicesMetaData_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } servicesMetaData_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch 
(org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { servicesMetaData_ = java.util.Collections.unmodifiableList(servicesMetaData_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto.Builder.class); } public static final int SERVICES_META_DATA_FIELD_NUMBER = 1; private java.util.List servicesMetaData_; /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public java.util.List getServicesMetaDataList() { return servicesMetaData_; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public java.util.List getServicesMetaDataOrBuilderList() { return servicesMetaData_; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public int getServicesMetaDataCount() { return servicesMetaData_.size(); } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServicesMetaData(int index) { return servicesMetaData_.get(index); } /** * repeated 
.hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServicesMetaDataOrBuilder( int index) { return servicesMetaData_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < servicesMetaData_.size(); i++) { output.writeMessage(1, servicesMetaData_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < servicesMetaData_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, servicesMetaData_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto) obj; if (!getServicesMetaDataList() .equals(other.getServicesMetaDataList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getServicesMetaDataCount() > 0) { hash = (37 * hash) + SERVICES_META_DATA_FIELD_NUMBER; hash = (53 * hash) + getServicesMetaDataList().hashCode(); } hash 
= (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.StartContainerResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.StartContainerResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) 
{ getServicesMetaDataFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (servicesMetaDataBuilder_ == null) { servicesMetaData_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { servicesMetaDataBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainerResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto(this); int from_bitField0_ = bitField0_; if (servicesMetaDataBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { servicesMetaData_ = java.util.Collections.unmodifiableList(servicesMetaData_); bitField0_ = (bitField0_ & ~0x00000001); } result.servicesMetaData_ = servicesMetaData_; } else { result.servicesMetaData_ = servicesMetaDataBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, 
value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto.getDefaultInstance()) return this; if (servicesMetaDataBuilder_ == null) { if (!other.servicesMetaData_.isEmpty()) { if (servicesMetaData_.isEmpty()) { servicesMetaData_ = other.servicesMetaData_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureServicesMetaDataIsMutable(); servicesMetaData_.addAll(other.servicesMetaData_); } onChanged(); } } else { if (!other.servicesMetaData_.isEmpty()) { if (servicesMetaDataBuilder_.isEmpty()) { servicesMetaDataBuilder_.dispose(); servicesMetaDataBuilder_ = null; servicesMetaData_ = other.servicesMetaData_; bitField0_ = (bitField0_ & ~0x00000001); servicesMetaDataBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getServicesMetaDataFieldBuilder() : null; } else { servicesMetaDataBuilder_.addAllMessages(other.servicesMetaData_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List servicesMetaData_ = java.util.Collections.emptyList(); private void ensureServicesMetaDataIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { servicesMetaData_ = new java.util.ArrayList(servicesMetaData_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> servicesMetaDataBuilder_; /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public java.util.List getServicesMetaDataList() { if (servicesMetaDataBuilder_ == null) { return java.util.Collections.unmodifiableList(servicesMetaData_); } else { return servicesMetaDataBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public int getServicesMetaDataCount() { if (servicesMetaDataBuilder_ == 
null) { return servicesMetaData_.size(); } else { return servicesMetaDataBuilder_.getCount(); } } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServicesMetaData(int index) { if (servicesMetaDataBuilder_ == null) { return servicesMetaData_.get(index); } else { return servicesMetaDataBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder setServicesMetaData( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) { if (servicesMetaDataBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureServicesMetaDataIsMutable(); servicesMetaData_.set(index, value); onChanged(); } else { servicesMetaDataBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder setServicesMetaData( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) { if (servicesMetaDataBuilder_ == null) { ensureServicesMetaDataIsMutable(); servicesMetaData_.set(index, builderForValue.build()); onChanged(); } else { servicesMetaDataBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder addServicesMetaData(org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) { if (servicesMetaDataBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureServicesMetaDataIsMutable(); servicesMetaData_.add(value); onChanged(); } else { servicesMetaDataBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder addServicesMetaData( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) { if (servicesMetaDataBuilder_ == null) { if (value == null) { throw new 
NullPointerException(); } ensureServicesMetaDataIsMutable(); servicesMetaData_.add(index, value); onChanged(); } else { servicesMetaDataBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder addServicesMetaData( org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) { if (servicesMetaDataBuilder_ == null) { ensureServicesMetaDataIsMutable(); servicesMetaData_.add(builderForValue.build()); onChanged(); } else { servicesMetaDataBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder addServicesMetaData( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) { if (servicesMetaDataBuilder_ == null) { ensureServicesMetaDataIsMutable(); servicesMetaData_.add(index, builderForValue.build()); onChanged(); } else { servicesMetaDataBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder addAllServicesMetaData( java.lang.Iterable values) { if (servicesMetaDataBuilder_ == null) { ensureServicesMetaDataIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, servicesMetaData_); onChanged(); } else { servicesMetaDataBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder clearServicesMetaData() { if (servicesMetaDataBuilder_ == null) { servicesMetaData_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { servicesMetaDataBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder removeServicesMetaData(int index) { if (servicesMetaDataBuilder_ == null) { ensureServicesMetaDataIsMutable(); 
servicesMetaData_.remove(index); onChanged(); } else { servicesMetaDataBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder getServicesMetaDataBuilder( int index) { return getServicesMetaDataFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServicesMetaDataOrBuilder( int index) { if (servicesMetaDataBuilder_ == null) { return servicesMetaData_.get(index); } else { return servicesMetaDataBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public java.util.List getServicesMetaDataOrBuilderList() { if (servicesMetaDataBuilder_ != null) { return servicesMetaDataBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(servicesMetaData_); } } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder addServicesMetaDataBuilder() { return getServicesMetaDataFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder addServicesMetaDataBuilder( int index) { return getServicesMetaDataFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public java.util.List getServicesMetaDataBuilderList() { return getServicesMetaDataFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto, 
org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> getServicesMetaDataFieldBuilder() { if (servicesMetaDataBuilder_ == null) { servicesMetaDataBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder>( servicesMetaData_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); servicesMetaData_ = null; } return servicesMetaDataBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.StartContainerResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.StartContainerResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public StartContainerResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new StartContainerResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface StopContainerRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.StopContainerRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ boolean hasContainerId(); /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(); /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.StopContainerRequestProto} */ public static final class StopContainerRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.StopContainerRequestProto) StopContainerRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use StopContainerRequestProto.newBuilder() to construct. 
// Builder-based constructor; all field state is copied in buildPartial().
private StopContainerRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) {
  super(builder);
}
// Default instance constructor: leaves containerId_ null and bitField0_ zero.
private StopContainerRequestProto() {
}

@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until end of stream
// (tag 0) and preserves unrecognized fields in unknownFields.
private StopContainerRequestProto(
    org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10: {
          // Tag 10 = (field 1 << 3) | wiretype 2: the container_id submessage.
          // If container_id was already seen, merge the new bytes into it.
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder subBuilder = null;
          if (((bitField0_ & 0x00000001) != 0)) {
            subBuilder = containerId_.toBuilder();
          }
          containerId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(containerId_);
            containerId_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000001;
          break;
        }
        default: {
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
      }
    }
  } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Always freeze what was parsed so far, even on error paths.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerRequestProto_descriptor;
}

@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerRequestProto_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto.Builder.class);
}

// bitField0_ tracks field presence; bit 0x00000001 = container_id is set.
private int bitField0_;
public static final int CONTAINER_ID_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
/**
 * optional .hadoop.yarn.ContainerIdProto container_id = 1;
 */
public boolean hasContainerId() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 * optional .hadoop.yarn.ContainerIdProto container_id = 1;
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
  // Never returns null: falls back to the default instance when unset.
  return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
}
/**
 * optional .hadoop.yarn.ContainerIdProto container_id = 1;
 */
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
  return containerId_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
}

// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;

  memoizedIsInitialized = 1;
  return true;
}

@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  // Only serialize container_id when present; unknown fields are passed through.
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getContainerId());
  }
  unknownFields.writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
      .computeMessageSize(1, getContainerId());
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto) obj;

  // Equal iff presence bits, field values, and unknown fields all match.
  if (hasContainerId() != other.hasContainerId()) return false;
  if (hasContainerId()) {
    if (!getContainerId()
        .equals(other.getContainerId())) return false;
  }
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasContainerId()) {
    hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
    hash = (53 * hash) + getContainerId().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}

public static
// Standard generated parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and
// without an ExtensionRegistryLite.
org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(
    java.nio.ByteBuffer data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(
    java.nio.ByteBuffer data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.ByteString data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.ByteString data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(byte[] data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(
    byte[] data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static
org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(
    java.io.InputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a leading varint length before the message bytes.
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseDelimitedFrom(
    java.io.InputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}

@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto prototype) {
  return
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // The default instance maps to an empty builder; anything else is copied in.
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Protobuf type {@code hadoop.yarn.StopContainerRequestProto}
 *
 * Mutable builder; container_id may be held either directly (containerId_)
 * or through a lazily-created SingleFieldBuilderV3 (containerIdBuilder_) —
 * exactly one of the two is active at any time.
 */
public static final class Builder extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
    // @@protoc_insertion_point(builder_implements:hadoop.yarn.StopContainerRequestProto)
    org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProtoOrBuilder {
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerRequestProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerRequestProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto.Builder.class);
  }

  // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
      getContainerIdFieldBuilder();
    }
  }
  @java.lang.Override
  public Builder clear() {
    super.clear();
    if (containerIdBuilder_ == null) {
      containerId_ = null;
    } else {
      containerIdBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    return this;
  }

  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerRequestProto_descriptor;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto getDefaultInstanceForType() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto.getDefaultInstance();
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto build() {
    org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto buildPartial() {
    // Copies the set fields and presence bits into a fresh immutable message.
    org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto(this);
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      if (containerIdBuilder_ == null) {
        result.containerId_ = containerId_;
      } else {
        result.containerId_ = containerIdBuilder_.build();
      }
      to_bitField0_ |= 0x00000001;
    }
    result.bitField0_ = to_bitField0_;
    onBuilt();
    return result;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
    if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto) {
      return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto other) {
    // Merging the default instance is a no-op.
    if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto.getDefaultInstance()) return this;
    if (other.hasContainerId()) {
      mergeContainerId(other.getContainerId());
    }
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  @java.lang.Override
  public Builder mergeFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      // Keep whatever parsed before the failure, then rethrow as IOException.
      parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  private int bitField0_;

  private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
  private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
  /**
   * optional .hadoop.yarn.ContainerIdProto container_id = 1;
   */
  public boolean hasContainerId() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * optional .hadoop.yarn.ContainerIdProto container_id = 1;
   */
  public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
    if (containerIdBuilder_ == null) {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    } else {
      return containerIdBuilder_.getMessage();
    }
  }
  /**
   * optional .hadoop.yarn.ContainerIdProto container_id = 1;
   */
  public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
    if (containerIdBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      containerId_ = value;
      onChanged();
    } else {
      containerIdBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000001;
    return this;
  }
  /**
   * optional .hadoop.yarn.ContainerIdProto container_id = 1;
   */
  public Builder setContainerId(
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
    if (containerIdBuilder_ == null) {
      containerId_ = builderForValue.build();
      onChanged();
    } else {
      containerIdBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000001;
    return this;
  }
  /**
   * optional .hadoop.yarn.ContainerIdProto container_id = 1;
   */
  public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
    if (containerIdBuilder_ == null) {
      // Field-level merge only when a non-default value is already present;
      // otherwise the incoming value simply replaces it.
      if (((bitField0_ & 0x00000001) != 0) &&
          containerId_ != null &&
          containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
        containerId_ =
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.newBuilder(containerId_).mergeFrom(value).buildPartial();
      } else {
        containerId_ = value;
      }
      onChanged();
    } else {
      containerIdBuilder_.mergeFrom(value);
    }
    bitField0_ |= 0x00000001;
    return this;
  }
  /**
   * optional .hadoop.yarn.ContainerIdProto container_id = 1;
   */
  public Builder clearContainerId() {
    if (containerIdBuilder_ == null) {
      containerId_ = null;
      onChanged();
    } else {
      containerIdBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    return this;
  }
  /**
   * optional .hadoop.yarn.ContainerIdProto container_id = 1;
   */
  public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
    // Marks the field set: handing out a mutable builder implies a write.
    bitField0_ |= 0x00000001;
    onChanged();
    return getContainerIdFieldBuilder().getBuilder();
  }
  /**
   * optional .hadoop.yarn.ContainerIdProto container_id = 1;
   */
  public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
    if (containerIdBuilder_ != null) {
      return containerIdBuilder_.getMessageOrBuilder();
    } else {
      return containerId_ == null ?
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }
  }
  /**
   * optional .hadoop.yarn.ContainerIdProto container_id = 1;
   */
  private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>
      getContainerIdFieldBuilder() {
    // Lazily switches storage from the plain field to the field builder.
    if (containerIdBuilder_ == null) {
      containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
              getContainerId(),
              getParentForChildren(),
              isClean());
      containerId_ = null;
    }
    return containerIdBuilder_;
  }
  @java.lang.Override
  public final Builder setUnknownFields(
      final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }


  // @@protoc_insertion_point(builder_scope:hadoop.yarn.StopContainerRequestProto)
}

// @@protoc_insertion_point(class_scope:hadoop.yarn.StopContainerRequestProto)
private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto();
}

public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
  @java.lang.Override
  public StopContainerRequestProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    // Delegates to the stream-parsing constructor of StopContainerRequestProto.
    return new StopContainerRequestProto(input, extensionRegistry);
  }
};

public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}

// Empty view interface: StopContainerResponseProto declares no fields.
public interface StopContainerResponseProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.StopContainerResponseProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
 * Protobuf type {@code hadoop.yarn.StopContainerResponseProto}
 *
 * Field-less acknowledgement message; only unknown fields are carried.
 */
public static final class StopContainerResponseProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.StopContainerResponseProto)
    StopContainerResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use StopContainerResponseProto.newBuilder() to construct.
private StopContainerResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) {
  super(builder);
}
private StopContainerResponseProto() {
}

@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor: the message has no declared fields, so every
// non-zero tag is routed to parseUnknownField.
private StopContainerResponseProto(
    org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
      }
    }
  } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerResponseProto_descriptor;
}

@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerResponseProto_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto.Builder.class);
}

private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;

  memoizedIsInitialized = 1;
  return true;
}

@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  // No declared fields: serialization is just the unknown-field passthrough.
  unknownFields.writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto) obj;

  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}

public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(
    java.nio.ByteBuffer data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(
    java.nio.ByteBuffer data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data,
extensionRegistry);
}
// Standard generated parse entry points, one per input source, with and without
// an ExtensionRegistryLite.
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.ByteString data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.ByteString data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(byte[] data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(
    byte[] data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(
    java.io.InputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a leading varint length before the message bytes.
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseDelimitedFrom(
    java.io.InputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}

@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ?
new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Protobuf type {@code hadoop.yarn.StopContainerResponseProto}
 *
 * Builder for the field-less response message; all field-level operations
 * simply delegate to the GeneratedMessageV3.Builder superclass.
 */
public static final class Builder extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
    // @@protoc_insertion_point(builder_implements:hadoop.yarn.StopContainerResponseProto)
    org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProtoOrBuilder {
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerResponseProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerResponseProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto.Builder.class);
  }

  // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    // No message fields, so no field builders to eagerly initialize.
    if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
    }
  }
  @java.lang.Override
  public Builder clear() {
    super.clear();
    return this;
  }

  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainerResponseProto_descriptor;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto getDefaultInstanceForType() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto.getDefaultInstance();
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto build() {
    org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto buildPartial() {
    org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto(this);
    onBuilt();
    return result;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
    if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto) {
      return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto other) {
    if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto.getDefaultInstance()) return this;
    // Only unknown fields can carry state for this message.
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  @java.lang.Override
  public Builder mergeFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  @java.lang.Override
  public final Builder setUnknownFields(
      final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }


  // @@protoc_insertion_point(builder_scope:hadoop.yarn.StopContainerResponseProto)
}

// @@protoc_insertion_point(class_scope:hadoop.yarn.StopContainerResponseProto)
private static final
org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public StopContainerResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new StopContainerResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ResourceLocalizationRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceLocalizationRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ boolean hasContainerId(); /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(); /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder(); /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ java.util.List getLocalResourcesList(); /** * repeated .hadoop.yarn.StringLocalResourceMapProto 
local_resources = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getLocalResources(int index); /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ int getLocalResourcesCount(); /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ java.util.List getLocalResourcesOrBuilderList(); /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder getLocalResourcesOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.ResourceLocalizationRequestProto} */ public static final class ResourceLocalizationRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceLocalizationRequestProto) ResourceLocalizationRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ResourceLocalizationRequestProto.newBuilder() to construct. 
private ResourceLocalizationRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ResourceLocalizationRequestProto() { localResources_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ResourceLocalizationRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = containerId_.toBuilder(); } containerId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(containerId_); containerId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) != 0)) { localResources_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } localResources_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw 
new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) != 0)) { localResources_ = java.util.Collections.unmodifiableList(localResources_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto.Builder.class); } private int bitField0_; public static final int CONTAINER_ID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_; /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public boolean hasContainerId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { return containerId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } public static final int LOCAL_RESOURCES_FIELD_NUMBER = 2; private java.util.List localResources_; /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public java.util.List getLocalResourcesList() { return localResources_; } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public java.util.List getLocalResourcesOrBuilderList() { return localResources_; } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public int getLocalResourcesCount() { return localResources_.size(); } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getLocalResources(int index) { return localResources_.get(index); } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder getLocalResourcesOrBuilder( int index) { return localResources_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getContainerId()); } for (int i = 0; i < localResources_.size(); i++) { output.writeMessage(2, localResources_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, 
getContainerId()); } for (int i = 0; i < localResources_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, localResources_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto) obj; if (hasContainerId() != other.hasContainerId()) return false; if (hasContainerId()) { if (!getContainerId() .equals(other.getContainerId())) return false; } if (!getLocalResourcesList() .equals(other.getLocalResourcesList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasContainerId()) { hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER; hash = (53 * hash) + getContainerId().hashCode(); } if (getLocalResourcesCount() > 0) { hash = (37 * hash) + LOCAL_RESOURCES_FIELD_NUMBER; hash = (53 * hash) + getLocalResourcesList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ResourceLocalizationRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceLocalizationRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getContainerIdFieldBuilder(); getLocalResourcesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (containerIdBuilder_ == null) { containerId_ = null; } else { 
containerIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (localResourcesBuilder_ == null) { localResources_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { localResourcesBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (containerIdBuilder_ == null) { result.containerId_ = containerId_; } else { result.containerId_ = containerIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } if (localResourcesBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { localResources_ = java.util.Collections.unmodifiableList(localResources_); bitField0_ = (bitField0_ & ~0x00000002); } result.localResources_ = localResources_; } else { result.localResources_ = localResourcesBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); 
return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto.getDefaultInstance()) return this; if (other.hasContainerId()) { mergeContainerId(other.getContainerId()); } if (localResourcesBuilder_ == null) { if (!other.localResources_.isEmpty()) { if (localResources_.isEmpty()) { localResources_ = other.localResources_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureLocalResourcesIsMutable(); localResources_.addAll(other.localResources_); } onChanged(); } } else { if 
(!other.localResources_.isEmpty()) { if (localResourcesBuilder_.isEmpty()) { localResourcesBuilder_.dispose(); localResourcesBuilder_ = null; localResources_ = other.localResources_; bitField0_ = (bitField0_ & ~0x00000002); localResourcesBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getLocalResourcesFieldBuilder() : null; } else { localResourcesBuilder_.addAllMessages(other.localResources_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_; /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public boolean hasContainerId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() { if 
(containerIdBuilder_ == null) { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } else { return containerIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } containerId_ = value; onChanged(); } else { containerIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (containerIdBuilder_ == null) { containerId_ = builderForValue.build(); onChanged(); } else { containerIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && containerId_ != null && containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) { containerId_ = org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.newBuilder(containerId_).mergeFrom(value).buildPartial(); } else { containerId_ = value; } onChanged(); } else { containerIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder clearContainerId() { if (containerIdBuilder_ == null) { containerId_ = null; onChanged(); } else { containerIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder 
getContainerIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getContainerIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { if (containerIdBuilder_ != null) { return containerIdBuilder_.getMessageOrBuilder(); } else { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getContainerIdFieldBuilder() { if (containerIdBuilder_ == null) { containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( getContainerId(), getParentForChildren(), isClean()); containerId_ = null; } return containerIdBuilder_; } private java.util.List localResources_ = java.util.Collections.emptyList(); private void ensureLocalResourcesIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { localResources_ = new java.util.ArrayList(localResources_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder> localResourcesBuilder_; /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public java.util.List getLocalResourcesList() { if 
(localResourcesBuilder_ == null) { return java.util.Collections.unmodifiableList(localResources_); } else { return localResourcesBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public int getLocalResourcesCount() { if (localResourcesBuilder_ == null) { return localResources_.size(); } else { return localResourcesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getLocalResources(int index) { if (localResourcesBuilder_ == null) { return localResources_.get(index); } else { return localResourcesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public Builder setLocalResources( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto value) { if (localResourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureLocalResourcesIsMutable(); localResources_.set(index, value); onChanged(); } else { localResourcesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public Builder setLocalResources( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder builderForValue) { if (localResourcesBuilder_ == null) { ensureLocalResourcesIsMutable(); localResources_.set(index, builderForValue.build()); onChanged(); } else { localResourcesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public Builder addLocalResources(org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto value) { if (localResourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureLocalResourcesIsMutable(); localResources_.add(value); onChanged(); } else { 
localResourcesBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public Builder addLocalResources( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto value) { if (localResourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureLocalResourcesIsMutable(); localResources_.add(index, value); onChanged(); } else { localResourcesBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public Builder addLocalResources( org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder builderForValue) { if (localResourcesBuilder_ == null) { ensureLocalResourcesIsMutable(); localResources_.add(builderForValue.build()); onChanged(); } else { localResourcesBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public Builder addLocalResources( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder builderForValue) { if (localResourcesBuilder_ == null) { ensureLocalResourcesIsMutable(); localResources_.add(index, builderForValue.build()); onChanged(); } else { localResourcesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public Builder addAllLocalResources( java.lang.Iterable values) { if (localResourcesBuilder_ == null) { ensureLocalResourcesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, localResources_); onChanged(); } else { localResourcesBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public Builder clearLocalResources() { if (localResourcesBuilder_ == null) { localResources_ = java.util.Collections.emptyList(); 
bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { localResourcesBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public Builder removeLocalResources(int index) { if (localResourcesBuilder_ == null) { ensureLocalResourcesIsMutable(); localResources_.remove(index); onChanged(); } else { localResourcesBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder getLocalResourcesBuilder( int index) { return getLocalResourcesFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder getLocalResourcesOrBuilder( int index) { if (localResourcesBuilder_ == null) { return localResources_.get(index); } else { return localResourcesBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public java.util.List getLocalResourcesOrBuilderList() { if (localResourcesBuilder_ != null) { return localResourcesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(localResources_); } } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder addLocalResourcesBuilder() { return getLocalResourcesFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder addLocalResourcesBuilder( int index) { return getLocalResourcesFieldBuilder().addBuilder( index, 
org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.StringLocalResourceMapProto local_resources = 2; */ public java.util.List getLocalResourcesBuilderList() { return getLocalResourcesFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder> getLocalResourcesFieldBuilder() { if (localResourcesBuilder_ == null) { localResourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder>( localResources_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); localResources_ = null; } return localResourcesBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceLocalizationRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceLocalizationRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto 
getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ResourceLocalizationRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new ResourceLocalizationRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ResourceLocalizationResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceLocalizationResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.ResourceLocalizationResponseProto} */ public static final class ResourceLocalizationResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceLocalizationResponseProto) ResourceLocalizationResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ResourceLocalizationResponseProto.newBuilder() to construct. 
private ResourceLocalizationResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ResourceLocalizationResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ResourceLocalizationResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite 
extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ResourceLocalizationResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceLocalizationResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { 
return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ResourceLocalizationResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceLocalizationResponseProto) } // 
@@protoc_insertion_point(class_scope:hadoop.yarn.ResourceLocalizationResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ResourceLocalizationResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new ResourceLocalizationResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ResourceLocalizationResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ReInitializeContainerRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReInitializeContainerRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ boolean hasContainerId(); /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(); /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder(); /** * optional 
.hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2; */ boolean hasContainerLaunchContext(); /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getContainerLaunchContext(); /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getContainerLaunchContextOrBuilder(); /** * optional bool auto_commit = 3 [default = true]; */ boolean hasAutoCommit(); /** * optional bool auto_commit = 3 [default = true]; */ boolean getAutoCommit(); } /** * Protobuf type {@code hadoop.yarn.ReInitializeContainerRequestProto} */ public static final class ReInitializeContainerRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ReInitializeContainerRequestProto) ReInitializeContainerRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ReInitializeContainerRequestProto.newBuilder() to construct. 
private ReInitializeContainerRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ReInitializeContainerRequestProto() { autoCommit_ = true; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ReInitializeContainerRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = containerId_.toBuilder(); } containerId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(containerId_); containerId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder subBuilder = null; if (((bitField0_ & 0x00000002) != 0)) { subBuilder = containerLaunchContext_.toBuilder(); } containerLaunchContext_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(containerLaunchContext_); containerLaunchContext_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 24: { bitField0_ |= 0x00000004; autoCommit_ = input.readBool(); break; } 
default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto.Builder.class); } private int bitField0_; public static final int CONTAINER_ID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_; /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public boolean hasContainerId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { return containerId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } public static final int CONTAINER_LAUNCH_CONTEXT_FIELD_NUMBER = 2; private org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto containerLaunchContext_; /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2; */ public boolean hasContainerLaunchContext() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getContainerLaunchContext() { return containerLaunchContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : containerLaunchContext_; } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getContainerLaunchContextOrBuilder() { return containerLaunchContext_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : containerLaunchContext_; } public static final int AUTO_COMMIT_FIELD_NUMBER = 3; private boolean autoCommit_; /** * optional bool auto_commit = 3 [default = true]; */ public boolean hasAutoCommit() { return ((bitField0_ & 0x00000004) != 0); } /** * optional bool auto_commit = 3 [default = true]; */ public boolean getAutoCommit() { return autoCommit_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getContainerId()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getContainerLaunchContext()); } if (((bitField0_ & 0x00000004) != 0)) { output.writeBool(3, autoCommit_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getContainerId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getContainerLaunchContext()); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(3, autoCommit_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto)) { return 
super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto) obj; if (hasContainerId() != other.hasContainerId()) return false; if (hasContainerId()) { if (!getContainerId() .equals(other.getContainerId())) return false; } if (hasContainerLaunchContext() != other.hasContainerLaunchContext()) return false; if (hasContainerLaunchContext()) { if (!getContainerLaunchContext() .equals(other.getContainerLaunchContext())) return false; } if (hasAutoCommit() != other.hasAutoCommit()) return false; if (hasAutoCommit()) { if (getAutoCommit() != other.getAutoCommit()) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasContainerId()) { hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER; hash = (53 * hash) + getContainerId().hashCode(); } if (hasContainerLaunchContext()) { hash = (37 * hash) + CONTAINER_LAUNCH_CONTEXT_FIELD_NUMBER; hash = (53 * hash) + getContainerLaunchContext().hashCode(); } if (hasAutoCommit()) { hash = (37 * hash) + AUTO_COMMIT_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getAutoCommit()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ReInitializeContainerRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReInitializeContainerRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getContainerIdFieldBuilder(); getContainerLaunchContextFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (containerIdBuilder_ == null) { containerId_ = null; } 
else { containerIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (containerLaunchContextBuilder_ == null) { containerLaunchContext_ = null; } else { containerLaunchContextBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); autoCommit_ = true; bitField0_ = (bitField0_ & ~0x00000004); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (containerIdBuilder_ == null) { result.containerId_ = containerId_; } else { result.containerId_ = containerIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { if (containerLaunchContextBuilder_ == null) { result.containerLaunchContext_ = containerLaunchContext_; } else { result.containerLaunchContext_ = containerLaunchContextBuilder_.build(); } to_bitField0_ |= 0x00000002; } if (((from_bitField0_ 
& 0x00000004) != 0)) { to_bitField0_ |= 0x00000004; } result.autoCommit_ = autoCommit_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto.getDefaultInstance()) return this; if (other.hasContainerId()) { mergeContainerId(other.getContainerId()); } if (other.hasContainerLaunchContext()) { mergeContainerLaunchContext(other.getContainerLaunchContext()); } if (other.hasAutoCommit()) { setAutoCommit(other.getAutoCommit()); } 
this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_; /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public boolean hasContainerId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() { if (containerIdBuilder_ == null) { return containerId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } else { return containerIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } containerId_ = value; onChanged(); } else { containerIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (containerIdBuilder_ == null) { containerId_ = builderForValue.build(); onChanged(); } else { containerIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && containerId_ != null && containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) { containerId_ = org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.newBuilder(containerId_).mergeFrom(value).buildPartial(); } else { containerId_ = value; } onChanged(); } else { containerIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder clearContainerId() { if (containerIdBuilder_ == null) { containerId_ = null; onChanged(); } else { containerIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() { bitField0_ |= 0x00000001; 
onChanged(); return getContainerIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { if (containerIdBuilder_ != null) { return containerIdBuilder_.getMessageOrBuilder(); } else { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getContainerIdFieldBuilder() { if (containerIdBuilder_ == null) { containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( getContainerId(), getParentForChildren(), isClean()); containerId_ = null; } return containerIdBuilder_; } private org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto containerLaunchContext_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder> containerLaunchContextBuilder_; /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2; */ public boolean hasContainerLaunchContext() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto 
getContainerLaunchContext() { if (containerLaunchContextBuilder_ == null) { return containerLaunchContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : containerLaunchContext_; } else { return containerLaunchContextBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2; */ public Builder setContainerLaunchContext(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto value) { if (containerLaunchContextBuilder_ == null) { if (value == null) { throw new NullPointerException(); } containerLaunchContext_ = value; onChanged(); } else { containerLaunchContextBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2; */ public Builder setContainerLaunchContext( org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder builderForValue) { if (containerLaunchContextBuilder_ == null) { containerLaunchContext_ = builderForValue.build(); onChanged(); } else { containerLaunchContextBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2; */ public Builder mergeContainerLaunchContext(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto value) { if (containerLaunchContextBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && containerLaunchContext_ != null && containerLaunchContext_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance()) { containerLaunchContext_ = org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.newBuilder(containerLaunchContext_).mergeFrom(value).buildPartial(); } else { containerLaunchContext_ = value; } onChanged(); } else { containerLaunchContextBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * optional 
.hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2; */ public Builder clearContainerLaunchContext() { if (containerLaunchContextBuilder_ == null) { containerLaunchContext_ = null; onChanged(); } else { containerLaunchContextBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder getContainerLaunchContextBuilder() { bitField0_ |= 0x00000002; onChanged(); return getContainerLaunchContextFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getContainerLaunchContextOrBuilder() { if (containerLaunchContextBuilder_ != null) { return containerLaunchContextBuilder_.getMessageOrBuilder(); } else { return containerLaunchContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : containerLaunchContext_; } } /** * optional .hadoop.yarn.ContainerLaunchContextProto container_launch_context = 2; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder> getContainerLaunchContextFieldBuilder() { if (containerLaunchContextBuilder_ == null) { containerLaunchContextBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder>( getContainerLaunchContext(), getParentForChildren(), isClean()); containerLaunchContext_ 
= null; } return containerLaunchContextBuilder_; } private boolean autoCommit_ = true; /** * optional bool auto_commit = 3 [default = true]; */ public boolean hasAutoCommit() { return ((bitField0_ & 0x00000004) != 0); } /** * optional bool auto_commit = 3 [default = true]; */ public boolean getAutoCommit() { return autoCommit_; } /** * optional bool auto_commit = 3 [default = true]; */ public Builder setAutoCommit(boolean value) { bitField0_ |= 0x00000004; autoCommit_ = value; onChanged(); return this; } /** * optional bool auto_commit = 3 [default = true]; */ public Builder clearAutoCommit() { bitField0_ = (bitField0_ & ~0x00000004); autoCommit_ = true; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReInitializeContainerRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ReInitializeContainerRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ReInitializeContainerRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite 
extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new ReInitializeContainerRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ReInitializeContainerResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReInitializeContainerResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.ReInitializeContainerResponseProto} */ public static final class ReInitializeContainerResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ReInitializeContainerResponseProto) ReInitializeContainerResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ReInitializeContainerResponseProto.newBuilder() to construct. 
private ReInitializeContainerResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ReInitializeContainerResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ReInitializeContainerResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite 
extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ReInitializeContainerResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReInitializeContainerResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor 
getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReInitializeContainerResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object 
value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReInitializeContainerResponseProto) } // 
@@protoc_insertion_point(class_scope:hadoop.yarn.ReInitializeContainerResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ReInitializeContainerResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new ReInitializeContainerResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReInitializeContainerResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface RestartContainerResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.RestartContainerResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.RestartContainerResponseProto} */ public static final class RestartContainerResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.RestartContainerResponseProto) RestartContainerResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use 
RestartContainerResponseProto.newBuilder() to construct. private RestartContainerResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private RestartContainerResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RestartContainerResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RestartContainerResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RestartContainerResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.RestartContainerResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.RestartContainerResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RestartContainerResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RestartContainerResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RestartContainerResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override 
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.RestartContainerResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.RestartContainerResponseProto) private static final 
org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public RestartContainerResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new RestartContainerResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RestartContainerResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface RollbackResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.RollbackResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.RollbackResponseProto} */ public static final class RollbackResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.RollbackResponseProto) RollbackResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use RollbackResponseProto.newBuilder() to construct. 
private RollbackResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private RollbackResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RollbackResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RollbackResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RollbackResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto.class, 
org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 
.parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.RollbackResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.RollbackResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RollbackResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RollbackResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RollbackResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.RollbackResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.RollbackResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto DEFAULT_INSTANCE; 
static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public RollbackResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new RollbackResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RollbackResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface CommitResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.CommitResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.CommitResponseProto} */ public static final class CommitResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.CommitResponseProto) CommitResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use CommitResponseProto.newBuilder() to construct. 
// ---------------------------------------------------------------------------
// NOTE(review): protoc-generated code (header says DO NOT EDIT) for the
// field-less message hadoop.yarn.CommitResponseProto; everything below is
// standard protobuf boilerplate. The extracted text had its newlines
// collapsed, which made the '//' line comments swallow the code that
// followed them; the original line breaks are restored here with the token
// stream otherwise unchanged. Generic type arguments (e.g. Builder<?>,
// Parser<CommitResponseProto>) appear to have been stripped by the HTML
// extraction as well -- TODO confirm against the original generated file;
// raw types are left as extracted rather than guessed at.
// ---------------------------------------------------------------------------

// Constructor used by Builder.buildPartial().
private CommitResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) {
  super(builder);
}
// Constructor for the shared default (empty) instance; no fields to set.
private CommitResponseProto() {
}

@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
  return this.unknownFields;
}

// Wire-format parsing constructor. The message declares no fields, so every
// tag except the 0 end marker is routed into the unknown-field set; the
// finally block freezes whatever was collected even when parsing fails.
private CommitResponseProto(
    org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
      }
    }
  } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}

public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_CommitResponseProto_descriptor;
}

@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_CommitResponseProto_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto.class,
          org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto.Builder.class);
}

private byte memoizedIsInitialized = -1;
// Always initialized: the message has no required fields to check.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}

// Serialization is just the unknown fields -- there is nothing else to write.
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  unknownFields.writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}

// Equality/hashing reduce to the unknown-field set for this field-less type.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto) obj;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}

// Standard protoc family of static parse entry points, all delegating to
// PARSER (defined after the static initializer below).
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(
    java.nio.ByteBuffer data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(
    java.nio.ByteBuffer data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.ByteString data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.ByteString data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(byte[] data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(
    byte[] data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(
    java.io.InputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseDelimitedFrom(
    java.io.InputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}

@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Protobuf type {@code hadoop.yarn.CommitResponseProto}
 */
public static final class Builder extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
    // @@protoc_insertion_point(builder_implements:hadoop.yarn.CommitResponseProto)
    org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProtoOrBuilder {
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_CommitResponseProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_CommitResponseProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto.class,
            org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto.Builder.class);
  }

  // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  // No repeated/message fields, so nothing to eagerly initialize here.
  private void maybeForceBuilderInitialization() {
    if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
    }
  }
  @java.lang.Override
  public Builder clear() {
    super.clear();
    return this;
  }

  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_CommitResponseProto_descriptor;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto getDefaultInstanceForType() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto.getDefaultInstance();
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto build() {
    org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto buildPartial() {
    org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto(this);
    onBuilt();
    return result;
  }

  // Reflection-style mutators simply delegate to GeneratedMessageV3.Builder.
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
    if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto) {
      return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Merging a field-less message only merges its unknown fields.
  public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto other) {
    if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto.getDefaultInstance()) return this;
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  @java.lang.Override
  public Builder mergeFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      // Keep whatever was parsed before the failure, then rethrow as IO.
      parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  @java.lang.Override
  public final Builder setUnknownFields(
      final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }


  // @@protoc_insertion_point(builder_scope:hadoop.yarn.CommitResponseProto)
}

// @@protoc_insertion_point(class_scope:hadoop.yarn.CommitResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new
org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public CommitResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new CommitResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.CommitResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface StartContainersRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.StartContainersRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ java.util.List getStartContainerRequestList(); /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto getStartContainerRequest(int index); /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ int getStartContainerRequestCount(); /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ java.util.List getStartContainerRequestOrBuilderList(); /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ 
org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProtoOrBuilder getStartContainerRequestOrBuilder( int index); } /** *
   * &lt;pre&gt;
   *// bulk API records
   * &lt;/pre&gt;
* * Protobuf type {@code hadoop.yarn.StartContainersRequestProto} */ public static final class StartContainersRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.StartContainersRequestProto) StartContainersRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use StartContainersRequestProto.newBuilder() to construct. private StartContainersRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private StartContainersRequestProto() { startContainerRequest_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private StartContainersRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { startContainerRequest_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } startContainerRequest_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch 
(java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { startContainerRequest_ = java.util.Collections.unmodifiableList(startContainerRequest_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto.Builder.class); } public static final int START_CONTAINER_REQUEST_FIELD_NUMBER = 1; private java.util.List startContainerRequest_; /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public java.util.List getStartContainerRequestList() { return startContainerRequest_; } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public java.util.List getStartContainerRequestOrBuilderList() { return startContainerRequest_; } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public int getStartContainerRequestCount() { return startContainerRequest_.size(); } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto getStartContainerRequest(int index) { return startContainerRequest_.get(index); } /** * repeated 
.hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProtoOrBuilder getStartContainerRequestOrBuilder( int index) { return startContainerRequest_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getStartContainerRequestCount(); i++) { if (!getStartContainerRequest(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < startContainerRequest_.size(); i++) { output.writeMessage(1, startContainerRequest_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < startContainerRequest_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, startContainerRequest_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto) obj; if (!getStartContainerRequestList() .equals(other.getStartContainerRequestList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; 
} int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getStartContainerRequestCount() > 0) { hash = (37 * hash) + START_CONTAINER_REQUEST_FIELD_NUMBER; hash = (53 * hash) + getStartContainerRequestList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** *
     * &lt;pre&gt;
     *// bulk API records
     * &lt;/pre&gt;
* * Protobuf type {@code hadoop.yarn.StartContainersRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.StartContainersRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getStartContainerRequestFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (startContainerRequestBuilder_ == null) { startContainerRequest_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { startContainerRequestBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto(this); int from_bitField0_ = bitField0_; if (startContainerRequestBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { startContainerRequest_ = java.util.Collections.unmodifiableList(startContainerRequest_); bitField0_ = (bitField0_ & ~0x00000001); } result.startContainerRequest_ = startContainerRequest_; } else { result.startContainerRequest_ = startContainerRequestBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto.getDefaultInstance()) return this; if (startContainerRequestBuilder_ == null) { if (!other.startContainerRequest_.isEmpty()) { if (startContainerRequest_.isEmpty()) { startContainerRequest_ = other.startContainerRequest_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureStartContainerRequestIsMutable(); startContainerRequest_.addAll(other.startContainerRequest_); } onChanged(); } } else { if (!other.startContainerRequest_.isEmpty()) { if (startContainerRequestBuilder_.isEmpty()) { startContainerRequestBuilder_.dispose(); startContainerRequestBuilder_ = null; startContainerRequest_ = other.startContainerRequest_; bitField0_ = (bitField0_ & ~0x00000001); startContainerRequestBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getStartContainerRequestFieldBuilder() : null; } else { startContainerRequestBuilder_.addAllMessages(other.startContainerRequest_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getStartContainerRequestCount(); i++) { if (!getStartContainerRequest(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List startContainerRequest_ = java.util.Collections.emptyList(); private void ensureStartContainerRequestIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { startContainerRequest_ = new java.util.ArrayList(startContainerRequest_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProtoOrBuilder> startContainerRequestBuilder_; /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public java.util.List getStartContainerRequestList() { if (startContainerRequestBuilder_ == null) { return 
java.util.Collections.unmodifiableList(startContainerRequest_); } else { return startContainerRequestBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public int getStartContainerRequestCount() { if (startContainerRequestBuilder_ == null) { return startContainerRequest_.size(); } else { return startContainerRequestBuilder_.getCount(); } } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto getStartContainerRequest(int index) { if (startContainerRequestBuilder_ == null) { return startContainerRequest_.get(index); } else { return startContainerRequestBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public Builder setStartContainerRequest( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto value) { if (startContainerRequestBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStartContainerRequestIsMutable(); startContainerRequest_.set(index, value); onChanged(); } else { startContainerRequestBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public Builder setStartContainerRequest( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder builderForValue) { if (startContainerRequestBuilder_ == null) { ensureStartContainerRequestIsMutable(); startContainerRequest_.set(index, builderForValue.build()); onChanged(); } else { startContainerRequestBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public Builder addStartContainerRequest(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto value) { if 
(startContainerRequestBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStartContainerRequestIsMutable(); startContainerRequest_.add(value); onChanged(); } else { startContainerRequestBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public Builder addStartContainerRequest( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto value) { if (startContainerRequestBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStartContainerRequestIsMutable(); startContainerRequest_.add(index, value); onChanged(); } else { startContainerRequestBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public Builder addStartContainerRequest( org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder builderForValue) { if (startContainerRequestBuilder_ == null) { ensureStartContainerRequestIsMutable(); startContainerRequest_.add(builderForValue.build()); onChanged(); } else { startContainerRequestBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public Builder addStartContainerRequest( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder builderForValue) { if (startContainerRequestBuilder_ == null) { ensureStartContainerRequestIsMutable(); startContainerRequest_.add(index, builderForValue.build()); onChanged(); } else { startContainerRequestBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public Builder addAllStartContainerRequest( java.lang.Iterable values) { if (startContainerRequestBuilder_ == null) { ensureStartContainerRequestIsMutable(); 
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, startContainerRequest_); onChanged(); } else { startContainerRequestBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public Builder clearStartContainerRequest() { if (startContainerRequestBuilder_ == null) { startContainerRequest_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { startContainerRequestBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public Builder removeStartContainerRequest(int index) { if (startContainerRequestBuilder_ == null) { ensureStartContainerRequestIsMutable(); startContainerRequest_.remove(index); onChanged(); } else { startContainerRequestBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder getStartContainerRequestBuilder( int index) { return getStartContainerRequestFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProtoOrBuilder getStartContainerRequestOrBuilder( int index) { if (startContainerRequestBuilder_ == null) { return startContainerRequest_.get(index); } else { return startContainerRequestBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public java.util.List getStartContainerRequestOrBuilderList() { if (startContainerRequestBuilder_ != null) { return startContainerRequestBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(startContainerRequest_); } } /** * repeated .hadoop.yarn.StartContainerRequestProto 
start_container_request = 1; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder addStartContainerRequestBuilder() { return getStartContainerRequestFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder addStartContainerRequestBuilder( int index) { return getStartContainerRequestFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.StartContainerRequestProto start_container_request = 1; */ public java.util.List getStartContainerRequestBuilderList() { return getStartContainerRequestFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProtoOrBuilder> getStartContainerRequestFieldBuilder() { if (startContainerRequestBuilder_ == null) { startContainerRequestBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProtoOrBuilder>( startContainerRequest_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); startContainerRequest_ = null; } return startContainerRequestBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return 
super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.StartContainersRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.StartContainersRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public StartContainersRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new StartContainersRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ContainerExceptionMapProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerExceptionMapProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ boolean hasContainerId(); /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(); /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder(); /** * optional .hadoop.yarn.SerializedExceptionProto exception = 2; */ boolean hasException(); /** * optional .hadoop.yarn.SerializedExceptionProto exception = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getException(); /** * optional .hadoop.yarn.SerializedExceptionProto exception = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder getExceptionOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.ContainerExceptionMapProto} */ public static final class ContainerExceptionMapProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerExceptionMapProto) ContainerExceptionMapProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ContainerExceptionMapProto.newBuilder() to construct. 
private ContainerExceptionMapProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ContainerExceptionMapProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ContainerExceptionMapProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = containerId_.toBuilder(); } containerId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(containerId_); containerId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder subBuilder = null; if (((bitField0_ & 0x00000002) != 0)) { subBuilder = exception_.toBuilder(); } exception_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(exception_); exception_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch 
(org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerExceptionMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerExceptionMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder.class); } private int bitField0_; public static final int CONTAINER_ID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_; /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public boolean hasContainerId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { return containerId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } public static final int EXCEPTION_FIELD_NUMBER = 2; private org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto exception_; /** * optional .hadoop.yarn.SerializedExceptionProto exception = 2; */ public boolean hasException() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.SerializedExceptionProto exception = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getException() { return exception_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance() : exception_; } /** * optional .hadoop.yarn.SerializedExceptionProto exception = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder getExceptionOrBuilder() { return exception_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance() : exception_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getContainerId()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getException()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getContainerId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getException()); } size += 
unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto) obj; if (hasContainerId() != other.hasContainerId()) return false; if (hasContainerId()) { if (!getContainerId() .equals(other.getContainerId())) return false; } if (hasException() != other.hasException()) return false; if (hasException()) { if (!getException() .equals(other.getException())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasContainerId()) { hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER; hash = (53 * hash) + getContainerId().hashCode(); } if (hasException()) { hash = (37 * hash) + EXCEPTION_FIELD_NUMBER; hash = (53 * hash) + getException().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto 
parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ContainerExceptionMapProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerExceptionMapProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerExceptionMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerExceptionMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getContainerIdFieldBuilder(); getExceptionFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (containerIdBuilder_ == null) { containerId_ = null; } else { containerIdBuilder_.clear(); } bitField0_ = (bitField0_ & 
~0x00000001); if (exceptionBuilder_ == null) { exception_ = null; } else { exceptionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerExceptionMapProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (containerIdBuilder_ == null) { result.containerId_ = containerId_; } else { result.containerId_ = containerIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { if (exceptionBuilder_ == null) { result.exception_ = exception_; } else { result.exception_ = exceptionBuilder_.build(); } to_bitField0_ |= 0x00000002; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, 
value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance()) return this; if (other.hasContainerId()) { mergeContainerId(other.getContainerId()); } if (other.hasException()) { mergeException(other.getException()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_; /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public boolean hasContainerId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() { if (containerIdBuilder_ == null) { return containerId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } else { return containerIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } containerId_ = value; onChanged(); } else { containerIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (containerIdBuilder_ == null) { containerId_ = builderForValue.build(); onChanged(); } else { containerIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && containerId_ != null && containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) { containerId_ = org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.newBuilder(containerId_).mergeFrom(value).buildPartial(); } else { containerId_ = value; } onChanged(); } else { containerIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder clearContainerId() { if (containerIdBuilder_ == null) { containerId_ = null; onChanged(); } else { containerIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() { bitField0_ |= 0x00000001; 
onChanged(); return getContainerIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { if (containerIdBuilder_ != null) { return containerIdBuilder_.getMessageOrBuilder(); } else { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getContainerIdFieldBuilder() { if (containerIdBuilder_ == null) { containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( getContainerId(), getParentForChildren(), isClean()); containerId_ = null; } return containerIdBuilder_; } private org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto exception_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder> exceptionBuilder_; /** * optional .hadoop.yarn.SerializedExceptionProto exception = 2; */ public boolean hasException() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.SerializedExceptionProto exception = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getException() { if (exceptionBuilder_ == null) { return exception_ == null ? 
// NOTE(review): generated code — do not hand-edit (see "DO NOT EDIT" file header);
// regenerate from yarn_service_protos.proto.
// Below: ContainerExceptionMapProto.Builder accessors for the optional exception field
// (field 2, presence bit 0x00000002), the unknown-field delegation methods that close the
// Builder, and the start of the static DEFAULT_INSTANCE initialization.
org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance() : exception_; } else { return exceptionBuilder_.getMessage(); } } /** * optional .hadoop.yarn.SerializedExceptionProto exception = 2; */ public Builder setException(org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto value) { if (exceptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } exception_ = value; onChanged(); } else { exceptionBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.yarn.SerializedExceptionProto exception = 2; */ public Builder setException( org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder builderForValue) { if (exceptionBuilder_ == null) { exception_ = builderForValue.build(); onChanged(); } else { exceptionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.yarn.SerializedExceptionProto exception = 2; */ public Builder mergeException(org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto value) { if (exceptionBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && exception_ != null && exception_ != org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance()) { exception_ = org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.newBuilder(exception_).mergeFrom(value).buildPartial(); } else { exception_ = value; } onChanged(); } else { exceptionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.yarn.SerializedExceptionProto exception = 2; */ public Builder clearException() { if (exceptionBuilder_ == null) { exception_ = null; onChanged(); } else { exceptionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * optional .hadoop.yarn.SerializedExceptionProto exception = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder getExceptionBuilder() { 
bitField0_ |= 0x00000002; onChanged(); return getExceptionFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.SerializedExceptionProto exception = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder getExceptionOrBuilder() { if (exceptionBuilder_ != null) { return exceptionBuilder_.getMessageOrBuilder(); } else { return exception_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance() : exception_; } } /** * optional .hadoop.yarn.SerializedExceptionProto exception = 2; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder> getExceptionFieldBuilder() { if (exceptionBuilder_ == null) { exceptionBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder>( getException(), getParentForChildren(), isClean()); exception_ = null; } return exceptionBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerExceptionMapProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerExceptionMapProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new 
// NOTE(review): generated code — do not hand-edit. Parser/default-instance plumbing that
// closes ContainerExceptionMapProto, then the StartContainersResponseProtoOrBuilder
// interface (three repeated fields: services_meta_data = 1, succeeded_requests = 2,
// failed_requests = 3), and the head of the StartContainersResponseProto message class.
// NOTE(review): several generic type parameters look stripped here (e.g. raw
// "Parser PARSER", raw "java.util.List getServicesMetaDataList()") — presumably lost when
// this text was extracted from HTML; regenerating from the .proto restores them. TODO confirm.
org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ContainerExceptionMapProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new ContainerExceptionMapProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface StartContainersResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.StartContainersResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ java.util.List getServicesMetaDataList(); /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServicesMetaData(int index); /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ int getServicesMetaDataCount(); /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ java.util.List getServicesMetaDataOrBuilderList(); /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServicesMetaDataOrBuilder( int 
index); /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ java.util.List getSucceededRequestsList(); /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index); /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ int getSucceededRequestsCount(); /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ java.util.List getSucceededRequestsOrBuilderList(); /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder( int index); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ java.util.List getFailedRequestsList(); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ int getFailedRequestsCount(); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ java.util.List getFailedRequestsOrBuilderList(); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.StartContainersResponseProto} */ public static final class StartContainersResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.StartContainersResponseProto) StartContainersResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use StartContainersResponseProto.newBuilder() to construct. 
// NOTE(review): generated code — do not hand-edit. Constructors of
// StartContainersResponseProto, including the wire-format parsing constructor: tags
// 10/18/26 append parsed messages to servicesMetaData_/succeededRequests_/failedRequests_,
// with mutable_bitField0_ bits ensuring each backing list is allocated only once.
// NOTE(review): "new java.util.ArrayList()" appears with its generic argument stripped
// (raw type) — presumably an HTML-extraction artifact; regenerate rather than hand-patch.
private StartContainersResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private StartContainersResponseProto() { servicesMetaData_ = java.util.Collections.emptyList(); succeededRequests_ = java.util.Collections.emptyList(); failedRequests_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private StartContainersResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { servicesMetaData_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } servicesMetaData_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.PARSER, extensionRegistry)); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) != 0)) { succeededRequests_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } succeededRequests_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry)); break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) != 0)) { failedRequests_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000004; } failedRequests_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( 
// NOTE(review): generated code — do not hand-edit. Tail of the parsing constructor (the
// finally block freezes each populated list as unmodifiable before assigning unknown
// fields), descriptor/field-accessor-table plumbing, and the read-side accessors for the
// repeated services_meta_data (1), succeeded_requests (2) and failed_requests (3) fields,
// ending in isInitialized/writeTo/getSerializedSize.
input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { servicesMetaData_ = java.util.Collections.unmodifiableList(servicesMetaData_); } if (((mutable_bitField0_ & 0x00000002) != 0)) { succeededRequests_ = java.util.Collections.unmodifiableList(succeededRequests_); } if (((mutable_bitField0_ & 0x00000004) != 0)) { failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto.Builder.class); } public static final int SERVICES_META_DATA_FIELD_NUMBER = 1; private java.util.List servicesMetaData_; /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public java.util.List getServicesMetaDataList() { return servicesMetaData_; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public java.util.List getServicesMetaDataOrBuilderList() { return servicesMetaData_; } /** * repeated .hadoop.yarn.StringBytesMapProto 
services_meta_data = 1; */ public int getServicesMetaDataCount() { return servicesMetaData_.size(); } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServicesMetaData(int index) { return servicesMetaData_.get(index); } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServicesMetaDataOrBuilder( int index) { return servicesMetaData_.get(index); } public static final int SUCCEEDED_REQUESTS_FIELD_NUMBER = 2; private java.util.List succeededRequests_; /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public java.util.List getSucceededRequestsList() { return succeededRequests_; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public java.util.List getSucceededRequestsOrBuilderList() { return succeededRequests_; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public int getSucceededRequestsCount() { return succeededRequests_.size(); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index) { return succeededRequests_.get(index); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder( int index) { return succeededRequests_.get(index); } public static final int FAILED_REQUESTS_FIELD_NUMBER = 3; private java.util.List failedRequests_; /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public java.util.List getFailedRequestsList() { return failedRequests_; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public java.util.List getFailedRequestsOrBuilderList() { return failedRequests_; } /** * repeated 
.hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public int getFailedRequestsCount() { return failedRequests_.size(); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) { return failedRequests_.get(index); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder( int index) { return failedRequests_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < servicesMetaData_.size(); i++) { output.writeMessage(1, servicesMetaData_.get(i)); } for (int i = 0; i < succeededRequests_.size(); i++) { output.writeMessage(2, succeededRequests_.get(i)); } for (int i = 0; i < failedRequests_.size(); i++) { output.writeMessage(3, failedRequests_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < servicesMetaData_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, servicesMetaData_.get(i)); } for (int i = 0; i < succeededRequests_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, succeededRequests_.get(i)); } for (int i = 0; i < failedRequests_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(3, failedRequests_.get(i)); } size += 
// NOTE(review): generated code — do not hand-edit. Tail of getSerializedSize (memoized),
// equals/hashCode over the three repeated fields plus unknown fields (hashCode memoized,
// descriptor seeds the hash), the full family of static parseFrom/parseDelimitedFrom
// overloads delegating to PARSER, and the newBuilder/toBuilder head.
unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto) obj; if (!getServicesMetaDataList() .equals(other.getServicesMetaDataList())) return false; if (!getSucceededRequestsList() .equals(other.getSucceededRequestsList())) return false; if (!getFailedRequestsList() .equals(other.getFailedRequestsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getServicesMetaDataCount() > 0) { hash = (37 * hash) + SERVICES_META_DATA_FIELD_NUMBER; hash = (53 * hash) + getServicesMetaDataList().hashCode(); } if (getSucceededRequestsCount() > 0) { hash = (37 * hash) + SUCCEEDED_REQUESTS_FIELD_NUMBER; hash = (53 * hash) + getSucceededRequestsList().hashCode(); } if (getFailedRequestsCount() > 0) { hash = (37 * hash) + FAILED_REQUESTS_FIELD_NUMBER; hash = (53 * hash) + getFailedRequestsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
// NOTE(review): generated code — do not hand-edit. Tail of toBuilder, the Builder class
// head with descriptor plumbing and eager field-builder init (when alwaysUseFieldBuilders),
// clear() resetting the three repeated fields, build()/buildPartial() snapshotting each
// list as unmodifiable before handing it to the result, the delegating
// setField/clearField/clearOneof/setRepeatedField/addRepeatedField overrides, and the
// mergeFrom(Message) type-dispatch head.
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.StartContainersResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.StartContainersResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getServicesMetaDataFieldBuilder(); getSucceededRequestsFieldBuilder(); getFailedRequestsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (servicesMetaDataBuilder_ == null) { servicesMetaData_ = 
java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { servicesMetaDataBuilder_.clear(); } if (succeededRequestsBuilder_ == null) { succeededRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { succeededRequestsBuilder_.clear(); } if (failedRequestsBuilder_ == null) { failedRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); } else { failedRequestsBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StartContainersResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto(this); int from_bitField0_ = bitField0_; if (servicesMetaDataBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { servicesMetaData_ = java.util.Collections.unmodifiableList(servicesMetaData_); bitField0_ = (bitField0_ & ~0x00000001); } result.servicesMetaData_ = servicesMetaData_; } else { result.servicesMetaData_ = servicesMetaDataBuilder_.build(); } if (succeededRequestsBuilder_ == null) { if (((bitField0_ & 
0x00000002) != 0)) { succeededRequests_ = java.util.Collections.unmodifiableList(succeededRequests_); bitField0_ = (bitField0_ & ~0x00000002); } result.succeededRequests_ = succeededRequests_; } else { result.succeededRequests_ = succeededRequestsBuilder_.build(); } if (failedRequestsBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0)) { failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_); bitField0_ = (bitField0_ & ~0x00000004); } result.failedRequests_ = failedRequests_; } else { result.failedRequests_ = failedRequestsBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder 
// NOTE(review): generated code — do not hand-edit. mergeFrom(other) for the three repeated
// fields: when no field builder exists it adopts/appends the other's list directly;
// otherwise it disposes an empty builder and adopts the list, or addAllMessages into the
// existing builder. Then isInitialized, mergeFrom(CodedInputStream) via PARSER with the
// partially-parsed message merged in even on failure, and the start of the Builder's
// servicesMetaData_ state (bitField0_, mutable-list helper).
mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto.getDefaultInstance()) return this; if (servicesMetaDataBuilder_ == null) { if (!other.servicesMetaData_.isEmpty()) { if (servicesMetaData_.isEmpty()) { servicesMetaData_ = other.servicesMetaData_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureServicesMetaDataIsMutable(); servicesMetaData_.addAll(other.servicesMetaData_); } onChanged(); } } else { if (!other.servicesMetaData_.isEmpty()) { if (servicesMetaDataBuilder_.isEmpty()) { servicesMetaDataBuilder_.dispose(); servicesMetaDataBuilder_ = null; servicesMetaData_ = other.servicesMetaData_; bitField0_ = (bitField0_ & ~0x00000001); servicesMetaDataBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getServicesMetaDataFieldBuilder() : null; } else { servicesMetaDataBuilder_.addAllMessages(other.servicesMetaData_); } } } if (succeededRequestsBuilder_ == null) { if (!other.succeededRequests_.isEmpty()) { if (succeededRequests_.isEmpty()) { succeededRequests_ = other.succeededRequests_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureSucceededRequestsIsMutable(); succeededRequests_.addAll(other.succeededRequests_); } onChanged(); } } else { if (!other.succeededRequests_.isEmpty()) { if (succeededRequestsBuilder_.isEmpty()) { succeededRequestsBuilder_.dispose(); succeededRequestsBuilder_ = null; succeededRequests_ = other.succeededRequests_; bitField0_ = (bitField0_ & ~0x00000002); succeededRequestsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getSucceededRequestsFieldBuilder() : null; } else { succeededRequestsBuilder_.addAllMessages(other.succeededRequests_); } } } if (failedRequestsBuilder_ == null) { if (!other.failedRequests_.isEmpty()) { if (failedRequests_.isEmpty()) { failedRequests_ = other.failedRequests_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureFailedRequestsIsMutable(); failedRequests_.addAll(other.failedRequests_); } onChanged(); } } else { if (!other.failedRequests_.isEmpty()) { if (failedRequestsBuilder_.isEmpty()) { failedRequestsBuilder_.dispose(); failedRequestsBuilder_ = null; failedRequests_ = other.failedRequests_; bitField0_ = (bitField0_ & ~0x00000004); failedRequestsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getFailedRequestsFieldBuilder() : null; } else { failedRequestsBuilder_.addAllMessages(other.failedRequests_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List servicesMetaData_ = java.util.Collections.emptyList(); private void ensureServicesMetaDataIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { servicesMetaData_ = new java.util.ArrayList(servicesMetaData_); 
// NOTE(review): generated code — do not hand-edit. RepeatedFieldBuilderV3 declaration and
// the full set of services_meta_data (field 1) Builder mutators — get/set/add/addAll/
// clear/remove plus OrBuilder views — each branching between the plain list path
// (guarded by ensureServicesMetaDataIsMutable + onChanged) and the field-builder path.
// The final method (addServicesMetaDataBuilder) continues past this excerpt.
bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> servicesMetaDataBuilder_; /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public java.util.List getServicesMetaDataList() { if (servicesMetaDataBuilder_ == null) { return java.util.Collections.unmodifiableList(servicesMetaData_); } else { return servicesMetaDataBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public int getServicesMetaDataCount() { if (servicesMetaDataBuilder_ == null) { return servicesMetaData_.size(); } else { return servicesMetaDataBuilder_.getCount(); } } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServicesMetaData(int index) { if (servicesMetaDataBuilder_ == null) { return servicesMetaData_.get(index); } else { return servicesMetaDataBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder setServicesMetaData( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) { if (servicesMetaDataBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureServicesMetaDataIsMutable(); servicesMetaData_.set(index, value); onChanged(); } else { servicesMetaDataBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder setServicesMetaData( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) { if (servicesMetaDataBuilder_ == null) { ensureServicesMetaDataIsMutable(); servicesMetaData_.set(index, builderForValue.build()); onChanged(); } else { 
servicesMetaDataBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder addServicesMetaData(org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) { if (servicesMetaDataBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureServicesMetaDataIsMutable(); servicesMetaData_.add(value); onChanged(); } else { servicesMetaDataBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder addServicesMetaData( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) { if (servicesMetaDataBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureServicesMetaDataIsMutable(); servicesMetaData_.add(index, value); onChanged(); } else { servicesMetaDataBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder addServicesMetaData( org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) { if (servicesMetaDataBuilder_ == null) { ensureServicesMetaDataIsMutable(); servicesMetaData_.add(builderForValue.build()); onChanged(); } else { servicesMetaDataBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder addServicesMetaData( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) { if (servicesMetaDataBuilder_ == null) { ensureServicesMetaDataIsMutable(); servicesMetaData_.add(index, builderForValue.build()); onChanged(); } else { servicesMetaDataBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder addAllServicesMetaData( java.lang.Iterable values) { if 
(servicesMetaDataBuilder_ == null) { ensureServicesMetaDataIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, servicesMetaData_); onChanged(); } else { servicesMetaDataBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder clearServicesMetaData() { if (servicesMetaDataBuilder_ == null) { servicesMetaData_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { servicesMetaDataBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public Builder removeServicesMetaData(int index) { if (servicesMetaDataBuilder_ == null) { ensureServicesMetaDataIsMutable(); servicesMetaData_.remove(index); onChanged(); } else { servicesMetaDataBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder getServicesMetaDataBuilder( int index) { return getServicesMetaDataFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServicesMetaDataOrBuilder( int index) { if (servicesMetaDataBuilder_ == null) { return servicesMetaData_.get(index); } else { return servicesMetaDataBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public java.util.List getServicesMetaDataOrBuilderList() { if (servicesMetaDataBuilder_ != null) { return servicesMetaDataBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(servicesMetaData_); } } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder addServicesMetaDataBuilder() { return 
getServicesMetaDataFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder addServicesMetaDataBuilder( int index) { return getServicesMetaDataFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.StringBytesMapProto services_meta_data = 1; */ public java.util.List getServicesMetaDataBuilderList() { return getServicesMetaDataFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> getServicesMetaDataFieldBuilder() { if (servicesMetaDataBuilder_ == null) { servicesMetaDataBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder>( servicesMetaData_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); servicesMetaData_ = null; } return servicesMetaDataBuilder_; } private java.util.List succeededRequests_ = java.util.Collections.emptyList(); private void ensureSucceededRequestsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { succeededRequests_ = new java.util.ArrayList(succeededRequests_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
succeededRequestsBuilder_; /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public java.util.List getSucceededRequestsList() { if (succeededRequestsBuilder_ == null) { return java.util.Collections.unmodifiableList(succeededRequests_); } else { return succeededRequestsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public int getSucceededRequestsCount() { if (succeededRequestsBuilder_ == null) { return succeededRequests_.size(); } else { return succeededRequestsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index) { if (succeededRequestsBuilder_ == null) { return succeededRequests_.get(index); } else { return succeededRequestsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public Builder setSucceededRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (succeededRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSucceededRequestsIsMutable(); succeededRequests_.set(index, value); onChanged(); } else { succeededRequestsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public Builder setSucceededRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); succeededRequests_.set(index, builderForValue.build()); onChanged(); } else { succeededRequestsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public Builder addSucceededRequests(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (succeededRequestsBuilder_ == null) { if 
(value == null) { throw new NullPointerException(); } ensureSucceededRequestsIsMutable(); succeededRequests_.add(value); onChanged(); } else { succeededRequestsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public Builder addSucceededRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (succeededRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSucceededRequestsIsMutable(); succeededRequests_.add(index, value); onChanged(); } else { succeededRequestsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public Builder addSucceededRequests( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); succeededRequests_.add(builderForValue.build()); onChanged(); } else { succeededRequestsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public Builder addSucceededRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); succeededRequests_.add(index, builderForValue.build()); onChanged(); } else { succeededRequestsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public Builder addAllSucceededRequests( java.lang.Iterable values) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, succeededRequests_); onChanged(); } else { succeededRequestsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; 
*/ public Builder clearSucceededRequests() { if (succeededRequestsBuilder_ == null) { succeededRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { succeededRequestsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public Builder removeSucceededRequests(int index) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); succeededRequests_.remove(index); onChanged(); } else { succeededRequestsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getSucceededRequestsBuilder( int index) { return getSucceededRequestsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder( int index) { if (succeededRequestsBuilder_ == null) { return succeededRequests_.get(index); } else { return succeededRequestsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public java.util.List getSucceededRequestsOrBuilderList() { if (succeededRequestsBuilder_ != null) { return succeededRequestsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(succeededRequests_); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addSucceededRequestsBuilder() { return getSucceededRequestsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addSucceededRequestsBuilder( int index) { return 
getSucceededRequestsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 2; */ public java.util.List getSucceededRequestsBuilderList() { return getSucceededRequestsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getSucceededRequestsFieldBuilder() { if (succeededRequestsBuilder_ == null) { succeededRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( succeededRequests_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); succeededRequests_ = null; } return succeededRequestsBuilder_; } private java.util.List failedRequests_ = java.util.Collections.emptyList(); private void ensureFailedRequestsIsMutable() { if (!((bitField0_ & 0x00000004) != 0)) { failedRequests_ = new java.util.ArrayList(failedRequests_); bitField0_ |= 0x00000004; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> failedRequestsBuilder_; /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public java.util.List getFailedRequestsList() { if (failedRequestsBuilder_ == null) { return java.util.Collections.unmodifiableList(failedRequests_); } else { return failedRequestsBuilder_.getMessageList(); } } /** * 
repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public int getFailedRequestsCount() { if (failedRequestsBuilder_ == null) { return failedRequests_.size(); } else { return failedRequestsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) { if (failedRequestsBuilder_ == null) { return failedRequests_.get(index); } else { return failedRequestsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public Builder setFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.set(index, value); onChanged(); } else { failedRequestsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public Builder setFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.set(index, builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public Builder addFailedRequests(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.add(value); onChanged(); } else { failedRequestsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public Builder 
addFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.add(index, value); onChanged(); } else { failedRequestsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public Builder addFailedRequests( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.add(builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public Builder addFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.add(index, builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public Builder addAllFailedRequests( java.lang.Iterable values) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, failedRequests_); onChanged(); } else { failedRequestsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public Builder clearFailedRequests() { if (failedRequestsBuilder_ == null) { failedRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { failedRequestsBuilder_.clear(); } return this; } /** * repeated 
.hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public Builder removeFailedRequests(int index) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.remove(index); onChanged(); } else { failedRequestsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder getFailedRequestsBuilder( int index) { return getFailedRequestsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder( int index) { if (failedRequestsBuilder_ == null) { return failedRequests_.get(index); } else { return failedRequestsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public java.util.List getFailedRequestsOrBuilderList() { if (failedRequestsBuilder_ != null) { return failedRequestsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(failedRequests_); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder() { return getFailedRequestsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 3; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder( int index) { return getFailedRequestsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto 
failed_requests = 3; */ public java.util.List getFailedRequestsBuilderList() { return getFailedRequestsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> getFailedRequestsFieldBuilder() { if (failedRequestsBuilder_ == null) { failedRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder>( failedRequests_, ((bitField0_ & 0x00000004) != 0), getParentForChildren(), isClean()); failedRequests_ = null; } return failedRequestsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.StartContainersResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.StartContainersResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser 
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public StartContainersResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new StartContainersResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface StopContainersRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.StopContainersRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ java.util.List getContainerIdList(); /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index); /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ int getContainerIdCount(); /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ java.util.List getContainerIdOrBuilderList(); /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.StopContainersRequestProto} */ public static final class StopContainersRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.StopContainersRequestProto) StopContainersRequestProtoOrBuilder { private static final long serialVersionUID = 0L; 
// Use StopContainersRequestProto.newBuilder() to construct. private StopContainersRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private StopContainersRequestProto() { containerId_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private StopContainersRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { containerId_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } containerId_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { containerId_ = java.util.Collections.unmodifiableList(containerId_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { 
return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto.Builder.class); } public static final int CONTAINER_ID_FIELD_NUMBER = 1; private java.util.List containerId_; /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public java.util.List getContainerIdList() { return containerId_; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public java.util.List getContainerIdOrBuilderList() { return containerId_; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public int getContainerIdCount() { return containerId_.size(); } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index) { return containerId_.get(index); } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder( int index) { return containerId_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < containerId_.size(); i++) { output.writeMessage(1, containerId_.get(i)); } 
unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < containerId_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, containerId_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto) obj; if (!getContainerIdList() .equals(other.getContainerIdList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getContainerIdCount() > 0) { hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER; hash = (53 * hash) + getContainerIdList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom( 
org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.StopContainersRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.StopContainersRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getContainerIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (containerIdBuilder_ == null) { containerId_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { 
containerIdBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto(this); int from_bitField0_ = bitField0_; if (containerIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { containerId_ = java.util.Collections.unmodifiableList(containerId_); bitField0_ = (bitField0_ & ~0x00000001); } result.containerId_ = containerId_; } else { result.containerId_ = containerIdBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return 
super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto.getDefaultInstance()) return this; if (containerIdBuilder_ == null) { if (!other.containerId_.isEmpty()) { if (containerId_.isEmpty()) { containerId_ = other.containerId_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureContainerIdIsMutable(); containerId_.addAll(other.containerId_); } onChanged(); } } else { if (!other.containerId_.isEmpty()) { if (containerIdBuilder_.isEmpty()) { containerIdBuilder_.dispose(); containerIdBuilder_ = null; containerId_ = other.containerId_; bitField0_ = (bitField0_ & ~0x00000001); containerIdBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getContainerIdFieldBuilder() : null; } else { containerIdBuilder_.addAllMessages(other.containerId_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List containerId_ = java.util.Collections.emptyList(); private void ensureContainerIdIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { containerId_ = new java.util.ArrayList(containerId_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_; /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public java.util.List getContainerIdList() { if (containerIdBuilder_ == null) { return java.util.Collections.unmodifiableList(containerId_); } else { return containerIdBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public int getContainerIdCount() { if (containerIdBuilder_ == null) { return containerId_.size(); } else { return containerIdBuilder_.getCount(); } } /** * 
repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index) { if (containerIdBuilder_ == null) { return containerId_.get(index); } else { return containerIdBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerIdIsMutable(); containerId_.set(index, value); onChanged(); } else { containerIdBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (containerIdBuilder_ == null) { ensureContainerIdIsMutable(); containerId_.set(index, builderForValue.build()); onChanged(); } else { containerIdBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder addContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerIdIsMutable(); containerId_.add(value); onChanged(); } else { containerIdBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder addContainerId( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerIdIsMutable(); containerId_.add(index, value); onChanged(); } else { containerIdBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder addContainerId( 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (containerIdBuilder_ == null) { ensureContainerIdIsMutable(); containerId_.add(builderForValue.build()); onChanged(); } else { containerIdBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder addContainerId( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (containerIdBuilder_ == null) { ensureContainerIdIsMutable(); containerId_.add(index, builderForValue.build()); onChanged(); } else { containerIdBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder addAllContainerId( java.lang.Iterable values) { if (containerIdBuilder_ == null) { ensureContainerIdIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, containerId_); onChanged(); } else { containerIdBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder clearContainerId() { if (containerIdBuilder_ == null) { containerId_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { containerIdBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder removeContainerId(int index) { if (containerIdBuilder_ == null) { ensureContainerIdIsMutable(); containerId_.remove(index); onChanged(); } else { containerIdBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder( int index) { return getContainerIdFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder( int index) { if (containerIdBuilder_ == null) { return containerId_.get(index); } else { return containerIdBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public java.util.List getContainerIdOrBuilderList() { if (containerIdBuilder_ != null) { return containerIdBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(containerId_); } } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addContainerIdBuilder() { return getContainerIdFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addContainerIdBuilder( int index) { return getContainerIdFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public java.util.List getContainerIdBuilderList() { return getContainerIdFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getContainerIdFieldBuilder() { if (containerIdBuilder_ == null) { containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( containerId_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); 
containerId_ = null; } return containerIdBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.StopContainersRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.StopContainersRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public StopContainersRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new StopContainersRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface StopContainersResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.StopContainersResponseProto) 
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ java.util.List getSucceededRequestsList(); /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index); /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ int getSucceededRequestsCount(); /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ java.util.List getSucceededRequestsOrBuilderList(); /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder( int index); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ java.util.List getFailedRequestsList(); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ int getFailedRequestsCount(); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ java.util.List getFailedRequestsOrBuilderList(); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.StopContainersResponseProto} */ public static final class StopContainersResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.StopContainersResponseProto) StopContainersResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use StopContainersResponseProto.newBuilder() to construct. 
private StopContainersResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private StopContainersResponseProto() { succeededRequests_ = java.util.Collections.emptyList(); failedRequests_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private StopContainersResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { succeededRequests_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } succeededRequests_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry)); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) != 0)) { failedRequests_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } failedRequests_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } 
finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { succeededRequests_ = java.util.Collections.unmodifiableList(succeededRequests_); } if (((mutable_bitField0_ & 0x00000002) != 0)) { failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto.Builder.class); } public static final int SUCCEEDED_REQUESTS_FIELD_NUMBER = 1; private java.util.List succeededRequests_; /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public java.util.List getSucceededRequestsList() { return succeededRequests_; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public java.util.List getSucceededRequestsOrBuilderList() { return succeededRequests_; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public int getSucceededRequestsCount() { return succeededRequests_.size(); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index) { return succeededRequests_.get(index); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder( int 
index) { return succeededRequests_.get(index); } public static final int FAILED_REQUESTS_FIELD_NUMBER = 2; private java.util.List failedRequests_; /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsList() { return failedRequests_; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsOrBuilderList() { return failedRequests_; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public int getFailedRequestsCount() { return failedRequests_.size(); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) { return failedRequests_.get(index); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder( int index) { return failedRequests_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < succeededRequests_.size(); i++) { output.writeMessage(1, succeededRequests_.get(i)); } for (int i = 0; i < failedRequests_.size(); i++) { output.writeMessage(2, failedRequests_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < succeededRequests_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, 
succeededRequests_.get(i)); } for (int i = 0; i < failedRequests_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, failedRequests_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto) obj; if (!getSucceededRequestsList() .equals(other.getSucceededRequestsList())) return false; if (!getFailedRequestsList() .equals(other.getFailedRequestsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getSucceededRequestsCount() > 0) { hash = (37 * hash) + SUCCEEDED_REQUESTS_FIELD_NUMBER; hash = (53 * hash) + getSucceededRequestsList().hashCode(); } if (getFailedRequestsCount() > 0) { hash = (37 * hash) + FAILED_REQUESTS_FIELD_NUMBER; hash = (53 * hash) + getFailedRequestsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { 
return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseDelimitedFrom(java.io.InputStream input) throws 
java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.StopContainersResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.StopContainersResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getSucceededRequestsFieldBuilder(); getFailedRequestsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (succeededRequestsBuilder_ == null) { succeededRequests_ = java.util.Collections.emptyList(); bitField0_ 
= (bitField0_ & ~0x00000001); } else { succeededRequestsBuilder_.clear(); } if (failedRequestsBuilder_ == null) { failedRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { failedRequestsBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_StopContainersResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto(this); int from_bitField0_ = bitField0_; if (succeededRequestsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { succeededRequests_ = java.util.Collections.unmodifiableList(succeededRequests_); bitField0_ = (bitField0_ & ~0x00000001); } result.succeededRequests_ = succeededRequests_; } else { result.succeededRequests_ = succeededRequestsBuilder_.build(); } if (failedRequestsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_); bitField0_ = (bitField0_ & ~0x00000002); } result.failedRequests_ = failedRequests_; } else { result.failedRequests_ = 
// Builder reflection overrides (clone/setField/clearField/...) delegate to GeneratedMessageV3.Builder,
// then the two mergeFrom overloads: the Message overload downcasts when possible; the typed overload
// concatenates the other message's repeated fields. Standard protoc pattern: when merging into an
// empty plain list the other (already-immutable) list is adopted by reference and the mutability bit
// cleared; otherwise elements are appended to a mutable copy.
failedRequestsBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto.getDefaultInstance()) return this; if (succeededRequestsBuilder_ == null) { if (!other.succeededRequests_.isEmpty()) { if (succeededRequests_.isEmpty()) { succeededRequests_ = other.succeededRequests_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureSucceededRequestsIsMutable(); succeededRequests_.addAll(other.succeededRequests_); } onChanged(); } } else { if (!other.succeededRequests_.isEmpty()) { 
// When a field builder exists and it is empty, the builder is disposed and the other list adopted
// directly (re-creating the builder only if alwaysUseFieldBuilders); otherwise the other messages
// are appended through the builder. Same pattern for failedRequests below.
if (succeededRequestsBuilder_.isEmpty()) { succeededRequestsBuilder_.dispose(); succeededRequestsBuilder_ = null; succeededRequests_ = other.succeededRequests_; bitField0_ = (bitField0_ & ~0x00000001); succeededRequestsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getSucceededRequestsFieldBuilder() : null; } else { succeededRequestsBuilder_.addAllMessages(other.succeededRequests_); } } } if (failedRequestsBuilder_ == null) { if (!other.failedRequests_.isEmpty()) { if (failedRequests_.isEmpty()) { failedRequests_ = other.failedRequests_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureFailedRequestsIsMutable(); failedRequests_.addAll(other.failedRequests_); } onChanged(); } } else { if (!other.failedRequests_.isEmpty()) { if (failedRequestsBuilder_.isEmpty()) { failedRequestsBuilder_.dispose(); failedRequestsBuilder_ = null; failedRequests_ = other.failedRequests_; bitField0_ = (bitField0_ & ~0x00000002); failedRequestsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
// Tail of mergeFrom(StopContainersResponseProto), then: isInitialized() is trivially true (no
// required fields), and mergeFrom(CodedInputStream) parses via PARSER, merging whatever was parsed
// even when an InvalidProtocolBufferException is thrown (partial message recovered in finally).
// After that, the builder's backing fields for succeeded_requests begin (element type stripped by
// the HTML scrape — originally List<YarnProtos.ContainerIdProto>).
getFailedRequestsFieldBuilder() : null; } else { failedRequestsBuilder_.addAllMessages(other.failedRequests_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List succeededRequests_ = java.util.Collections.emptyList(); private void ensureSucceededRequestsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { succeededRequests_ = new java.util.ArrayList(succeededRequests_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> succeededRequestsBuilder_; /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public java.util.List getSucceededRequestsList() { if (succeededRequestsBuilder_ == null) { return java.util.Collections.unmodifiableList(succeededRequests_); } else { return succeededRequestsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public int getSucceededRequestsCount() { if (succeededRequestsBuilder_ == null) { 
// Standard generated accessors for `repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1`:
// every method routes through the RepeatedFieldBuilderV3 when one exists, else operates on the
// plain list (making it mutable first via ensureSucceededRequestsIsMutable). Setters/adders reject
// null messages with NullPointerException; onChanged() notifies the parent builder.
return succeededRequests_.size(); } else { return succeededRequestsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index) { if (succeededRequestsBuilder_ == null) { return succeededRequests_.get(index); } else { return succeededRequestsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder setSucceededRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (succeededRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSucceededRequestsIsMutable(); succeededRequests_.set(index, value); onChanged(); } else { succeededRequestsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder setSucceededRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); succeededRequests_.set(index, builderForValue.build()); onChanged(); } else { succeededRequestsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder addSucceededRequests(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (succeededRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSucceededRequestsIsMutable(); succeededRequests_.add(value); onChanged(); } else { succeededRequestsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder addSucceededRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (succeededRequestsBuilder_ == null) { if (value == null) { throw new 
// Indexed/bulk add variants, clearSucceededRequests (drops to empty list and clears bit 0x1), and
// removeSucceededRequests. addAllSucceededRequests uses AbstractMessageLite.Builder.addAll.
NullPointerException(); } ensureSucceededRequestsIsMutable(); succeededRequests_.add(index, value); onChanged(); } else { succeededRequestsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder addSucceededRequests( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); succeededRequests_.add(builderForValue.build()); onChanged(); } else { succeededRequestsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder addSucceededRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); succeededRequests_.add(index, builderForValue.build()); onChanged(); } else { succeededRequestsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder addAllSucceededRequests( java.lang.Iterable values) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, succeededRequests_); onChanged(); } else { succeededRequestsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder clearSucceededRequests() { if (succeededRequestsBuilder_ == null) { succeededRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { succeededRequestsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder removeSucceededRequests(int index) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); 
// Builder-view accessors (getSucceededRequestsBuilder / ...OrBuilder / ...BuilderList) and the
// lazy field-builder factory declaration begin here.
succeededRequests_.remove(index); onChanged(); } else { succeededRequestsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getSucceededRequestsBuilder( int index) { return getSucceededRequestsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder( int index) { if (succeededRequestsBuilder_ == null) { return succeededRequests_.get(index); } else { return succeededRequestsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public java.util.List getSucceededRequestsOrBuilderList() { if (succeededRequestsBuilder_ != null) { return succeededRequestsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(succeededRequests_); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addSucceededRequestsBuilder() { return getSucceededRequestsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addSucceededRequestsBuilder( int index) { return getSucceededRequestsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public java.util.List getSucceededRequestsBuilderList() { return getSucceededRequestsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, 
// getSucceededRequestsFieldBuilder(): lazily wraps the plain list in a RepeatedFieldBuilderV3
// (passing current mutability bit, parent, and clean state), then nulls the plain list — from then
// on the builder is the single source of truth. Followed by the identical machinery for
// `repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2` (bit 0x00000002).
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getSucceededRequestsFieldBuilder() { if (succeededRequestsBuilder_ == null) { succeededRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( succeededRequests_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); succeededRequests_ = null; } return succeededRequestsBuilder_; } private java.util.List failedRequests_ = java.util.Collections.emptyList(); private void ensureFailedRequestsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { failedRequests_ = new java.util.ArrayList(failedRequests_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> failedRequestsBuilder_; /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsList() { if (failedRequestsBuilder_ == null) { return java.util.Collections.unmodifiableList(failedRequests_); } else { return failedRequestsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public int getFailedRequestsCount() { if (failedRequestsBuilder_ == null) { return failedRequests_.size(); } else { return failedRequestsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) { if (failedRequestsBuilder_ == null) 
// failed_requests getters/setters/adders — same builder-or-list dispatch and null checks as the
// succeeded_requests accessors above.
{ return failedRequests_.get(index); } else { return failedRequestsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder setFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.set(index, value); onChanged(); } else { failedRequestsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder setFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.set(index, builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addFailedRequests(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.add(value); onChanged(); } else { failedRequestsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.add(index, value); onChanged(); } else { failedRequestsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder 
// Remaining failed_requests mutators (builder-valued add, addAll, clear, remove) — mirror images of
// the succeeded_requests set above.
addFailedRequests( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.add(builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.add(index, builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addAllFailedRequests( java.lang.Iterable values) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, failedRequests_); onChanged(); } else { failedRequestsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder clearFailedRequests() { if (failedRequestsBuilder_ == null) { failedRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { failedRequestsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder removeFailedRequests(int index) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.remove(index); onChanged(); } else { failedRequestsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder 
// failed_requests builder-view accessors and the lazy field-builder factory declaration.
getFailedRequestsBuilder( int index) { return getFailedRequestsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder( int index) { if (failedRequestsBuilder_ == null) { return failedRequests_.get(index); } else { return failedRequestsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsOrBuilderList() { if (failedRequestsBuilder_ != null) { return failedRequestsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(failedRequests_); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder() { return getFailedRequestsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder( int index) { return getFailedRequestsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsBuilderList() { return getFailedRequestsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
// getFailedRequestsFieldBuilder() (lazy init, same pattern as succeeded_requests), unknown-field
// overrides, end of the Builder class, then the enclosing message's static tail: eagerly-created
// DEFAULT_INSTANCE and the anonymous PARSER (deprecated public field; parser() is the accessor),
// whose parsePartialFrom delegates to the message's CodedInputStream constructor.
getFailedRequestsFieldBuilder() { if (failedRequestsBuilder_ == null) { failedRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder>( failedRequests_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); failedRequests_ = null; } return failedRequestsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.StopContainersResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.StopContainersResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public StopContainersResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new StopContainersResponseProto(input, extensionRegistry); } }; public static 
// End of StopContainersResponseProto (parser()/getParserForType()/getDefaultInstanceForType()),
// then GetContainerStatusesRequestProtoOrBuilder — read-only view of the single repeated field
// `container_id = 1` — and the start of the GetContainerStatusesRequestProto message class.
org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetContainerStatusesRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetContainerStatusesRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ java.util.List getContainerIdList(); /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index); /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ int getContainerIdCount(); /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ java.util.List getContainerIdOrBuilderList(); /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.GetContainerStatusesRequestProto} */ public static final class GetContainerStatusesRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetContainerStatusesRequestProto) GetContainerStatusesRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetContainerStatusesRequestProto.newBuilder() to construct. 
// Constructors: the CodedInputStream ctor is the wire parser — tag 10 (field 1, wire type 2 =
// length-delimited) appends a ContainerIdProto; tag 0 ends the stream; anything else goes to
// unknown fields. The finally block freezes containerId_ and builds unknownFields even on error.
private GetContainerStatusesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetContainerStatusesRequestProto() { containerId_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetContainerStatusesRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { containerId_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } containerId_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { containerId_ = java.util.Collections.unmodifiableList(containerId_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return 
// Descriptor/accessor-table wiring, then the immutable message's container_id getters (list is
// already unmodifiable after parsing), isInitialized() (always true — no required fields, result
// memoized in a byte: -1 unknown / 0 false / 1 true), and writeTo() emitting each element as
// field 1 followed by unknown fields.
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto.Builder.class); } public static final int CONTAINER_ID_FIELD_NUMBER = 1; private java.util.List containerId_; /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public java.util.List getContainerIdList() { return containerId_; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public java.util.List getContainerIdOrBuilderList() { return containerId_; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public int getContainerIdCount() { return containerId_.size(); } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index) { return containerId_.get(index); } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder( int index) { return containerId_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < containerId_.size(); i++) { output.writeMessage(1, 
// getSerializedSize() (memoized in memoizedSize, -1 = not yet computed), value-based equals()
// over the container_id list plus unknown fields, memoized hashCode() using the standard
// protoc 19/37/53/29 mixing constants, and the first parseFrom overloads (ByteBuffer forms).
containerId_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < containerId_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, containerId_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto) obj; if (!getContainerIdList() .equals(other.getContainerIdList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getContainerIdCount() > 0) { hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER; hash = (53 * hash) + getContainerIdList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
// Remaining static parseFrom overloads: ByteString / byte[] (throw InvalidProtocolBufferException)
// and InputStream forms, which delegate to GeneratedMessageV3.parseWithIOException and throw
// java.io.IOException. Each comes with and without an ExtensionRegistryLite.
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return 
// Length-delimited stream parsing, CodedInputStream forms, and the builder factories: newBuilder()
// clones from DEFAULT_INSTANCE; toBuilder() returns a fresh Builder for the default instance,
// otherwise a Builder pre-merged with this message.
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetContainerStatusesRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetContainerStatusesRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getContainerIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (containerIdBuilder_ == null) { containerId_ = java.util.Collections.emptyList(); bitField0_ = 
(bitField0_ & ~0x00000001); } else { containerIdBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto(this); int from_bitField0_ = bitField0_; if (containerIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { containerId_ = java.util.Collections.unmodifiableList(containerId_); bitField0_ = (bitField0_ & ~0x00000001); } result.containerId_ = containerId_; } else { result.containerId_ = containerIdBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto.getDefaultInstance()) return this; if (containerIdBuilder_ == null) { if (!other.containerId_.isEmpty()) { if (containerId_.isEmpty()) { containerId_ = other.containerId_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureContainerIdIsMutable(); containerId_.addAll(other.containerId_); } onChanged(); } } else { if (!other.containerId_.isEmpty()) { if (containerIdBuilder_.isEmpty()) { containerIdBuilder_.dispose(); containerIdBuilder_ = null; containerId_ = other.containerId_; bitField0_ = (bitField0_ & ~0x00000001); containerIdBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getContainerIdFieldBuilder() : null; } else { containerIdBuilder_.addAllMessages(other.containerId_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List containerId_ = java.util.Collections.emptyList(); private void ensureContainerIdIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { containerId_ = new java.util.ArrayList(containerId_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_; /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public java.util.List getContainerIdList() { if (containerIdBuilder_ == null) { return java.util.Collections.unmodifiableList(containerId_); } else { return containerIdBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public int getContainerIdCount() { if (containerIdBuilder_ == null) { return containerId_.size(); } else { return containerIdBuilder_.getCount(); } } 
/** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index) { if (containerIdBuilder_ == null) { return containerId_.get(index); } else { return containerIdBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerIdIsMutable(); containerId_.set(index, value); onChanged(); } else { containerIdBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (containerIdBuilder_ == null) { ensureContainerIdIsMutable(); containerId_.set(index, builderForValue.build()); onChanged(); } else { containerIdBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder addContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerIdIsMutable(); containerId_.add(value); onChanged(); } else { containerIdBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder addContainerId( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerIdIsMutable(); containerId_.add(index, value); onChanged(); } else { containerIdBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder 
addContainerId( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (containerIdBuilder_ == null) { ensureContainerIdIsMutable(); containerId_.add(builderForValue.build()); onChanged(); } else { containerIdBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder addContainerId( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (containerIdBuilder_ == null) { ensureContainerIdIsMutable(); containerId_.add(index, builderForValue.build()); onChanged(); } else { containerIdBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder addAllContainerId( java.lang.Iterable values) { if (containerIdBuilder_ == null) { ensureContainerIdIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, containerId_); onChanged(); } else { containerIdBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder clearContainerId() { if (containerIdBuilder_ == null) { containerId_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { containerIdBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder removeContainerId(int index) { if (containerIdBuilder_ == null) { ensureContainerIdIsMutable(); containerId_.remove(index); onChanged(); } else { containerIdBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder( int index) { return getContainerIdFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder( int index) { if (containerIdBuilder_ == null) { return containerId_.get(index); } else { return containerIdBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public java.util.List getContainerIdOrBuilderList() { if (containerIdBuilder_ != null) { return containerIdBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(containerId_); } } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addContainerIdBuilder() { return getContainerIdFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addContainerIdBuilder( int index) { return getContainerIdFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public java.util.List getContainerIdBuilderList() { return getContainerIdFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getContainerIdFieldBuilder() { if (containerIdBuilder_ == null) { containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( containerId_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); 
containerId_ = null; } return containerIdBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetContainerStatusesRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetContainerStatusesRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetContainerStatusesRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetContainerStatusesRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetContainerStatusesResponseProtoOrBuilder extends // 
@@protoc_insertion_point(interface_extends:hadoop.yarn.GetContainerStatusesResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ java.util.List getStatusList(); /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getStatus(int index); /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ int getStatusCount(); /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ java.util.List getStatusOrBuilderList(); /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder getStatusOrBuilder( int index); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ java.util.List getFailedRequestsList(); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ int getFailedRequestsCount(); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ java.util.List getFailedRequestsOrBuilderList(); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.GetContainerStatusesResponseProto} */ public static final class GetContainerStatusesResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetContainerStatusesResponseProto) GetContainerStatusesResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetContainerStatusesResponseProto.newBuilder() to construct. 
private GetContainerStatusesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetContainerStatusesResponseProto() { status_ = java.util.Collections.emptyList(); failedRequests_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetContainerStatusesResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { status_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } status_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.PARSER, extensionRegistry)); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) != 0)) { failedRequests_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } failedRequests_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { 
if (((mutable_bitField0_ & 0x00000001) != 0)) { status_ = java.util.Collections.unmodifiableList(status_); } if (((mutable_bitField0_ & 0x00000002) != 0)) { failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto.Builder.class); } public static final int STATUS_FIELD_NUMBER = 1; private java.util.List status_; /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public java.util.List getStatusList() { return status_; } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public java.util.List getStatusOrBuilderList() { return status_; } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public int getStatusCount() { return status_.size(); } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getStatus(int index) { return status_.get(index); } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder getStatusOrBuilder( int index) { return status_.get(index); } public static final int FAILED_REQUESTS_FIELD_NUMBER = 2; private java.util.List failedRequests_; /** * repeated 
.hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsList() { return failedRequests_; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsOrBuilderList() { return failedRequests_; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public int getFailedRequestsCount() { return failedRequests_.size(); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) { return failedRequests_.get(index); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder( int index) { return failedRequests_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getStatusCount(); i++) { if (!getStatus(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < status_.size(); i++) { output.writeMessage(1, status_.get(i)); } for (int i = 0; i < failedRequests_.size(); i++) { output.writeMessage(2, failedRequests_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < status_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, status_.get(i)); } for (int i = 0; i < failedRequests_.size(); i++) { size += 
org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, failedRequests_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto) obj; if (!getStatusList() .equals(other.getStatusList())) return false; if (!getFailedRequestsList() .equals(other.getFailedRequestsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getStatusCount() > 0) { hash = (37 * hash) + STATUS_FIELD_NUMBER; hash = (53 * hash) + getStatusList().hashCode(); } if (getFailedRequestsCount() > 0) { hash = (37 * hash) + FAILED_REQUESTS_FIELD_NUMBER; hash = (53 * hash) + getFailedRequestsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetContainerStatusesResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetContainerStatusesResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getStatusFieldBuilder(); getFailedRequestsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (statusBuilder_ == null) { status_ = 
java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { statusBuilder_.clear(); } if (failedRequestsBuilder_ == null) { failedRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { failedRequestsBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerStatusesResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto(this); int from_bitField0_ = bitField0_; if (statusBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { status_ = java.util.Collections.unmodifiableList(status_); bitField0_ = (bitField0_ & ~0x00000001); } result.status_ = status_; } else { result.status_ = statusBuilder_.build(); } if (failedRequestsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_); bitField0_ = (bitField0_ & ~0x00000002); } result.failedRequests_ = failedRequests_; } else { result.failedRequests_ = 
failedRequestsBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto.getDefaultInstance()) return this; if (statusBuilder_ == null) { if (!other.status_.isEmpty()) { if (status_.isEmpty()) { status_ = other.status_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureStatusIsMutable(); status_.addAll(other.status_); } onChanged(); } } else { if (!other.status_.isEmpty()) { if (statusBuilder_.isEmpty()) { statusBuilder_.dispose(); statusBuilder_ = 
null; status_ = other.status_; bitField0_ = (bitField0_ & ~0x00000001); statusBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getStatusFieldBuilder() : null; } else { statusBuilder_.addAllMessages(other.status_); } } } if (failedRequestsBuilder_ == null) { if (!other.failedRequests_.isEmpty()) { if (failedRequests_.isEmpty()) { failedRequests_ = other.failedRequests_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureFailedRequestsIsMutable(); failedRequests_.addAll(other.failedRequests_); } onChanged(); } } else { if (!other.failedRequests_.isEmpty()) { if (failedRequestsBuilder_.isEmpty()) { failedRequestsBuilder_.dispose(); failedRequestsBuilder_ = null; failedRequests_ = other.failedRequests_; bitField0_ = (bitField0_ & ~0x00000002); failedRequestsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getFailedRequestsFieldBuilder() : null; } else { failedRequestsBuilder_.addAllMessages(other.failedRequests_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getStatusCount(); i++) { if (!getStatus(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int 
bitField0_; private java.util.List status_ = java.util.Collections.emptyList(); private void ensureStatusIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { status_ = new java.util.ArrayList(status_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder> statusBuilder_; /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public java.util.List getStatusList() { if (statusBuilder_ == null) { return java.util.Collections.unmodifiableList(status_); } else { return statusBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public int getStatusCount() { if (statusBuilder_ == null) { return status_.size(); } else { return statusBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getStatus(int index) { if (statusBuilder_ == null) { return status_.get(index); } else { return statusBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public Builder setStatus( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto value) { if (statusBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStatusIsMutable(); status_.set(index, value); onChanged(); } else { statusBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public Builder setStatus( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder builderForValue) { if (statusBuilder_ == null) { ensureStatusIsMutable(); status_.set(index, builderForValue.build()); onChanged(); } else { statusBuilder_.setMessage(index, builderForValue.build()); } return 
this; } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public Builder addStatus(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto value) { if (statusBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStatusIsMutable(); status_.add(value); onChanged(); } else { statusBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public Builder addStatus( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto value) { if (statusBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStatusIsMutable(); status_.add(index, value); onChanged(); } else { statusBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public Builder addStatus( org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder builderForValue) { if (statusBuilder_ == null) { ensureStatusIsMutable(); status_.add(builderForValue.build()); onChanged(); } else { statusBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public Builder addStatus( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder builderForValue) { if (statusBuilder_ == null) { ensureStatusIsMutable(); status_.add(index, builderForValue.build()); onChanged(); } else { statusBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public Builder addAllStatus( java.lang.Iterable values) { if (statusBuilder_ == null) { ensureStatusIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, status_); onChanged(); } else { statusBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public Builder clearStatus() { if (statusBuilder_ == null) { status_ = 
java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { statusBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public Builder removeStatus(int index) { if (statusBuilder_ == null) { ensureStatusIsMutable(); status_.remove(index); onChanged(); } else { statusBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder getStatusBuilder( int index) { return getStatusFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder getStatusOrBuilder( int index) { if (statusBuilder_ == null) { return status_.get(index); } else { return statusBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public java.util.List getStatusOrBuilderList() { if (statusBuilder_ != null) { return statusBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(status_); } } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder addStatusBuilder() { return getStatusFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder addStatusBuilder( int index) { return getStatusFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerStatusProto status = 1; */ public java.util.List getStatusBuilderList() { return getStatusFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder> getStatusFieldBuilder() { if (statusBuilder_ == null) { statusBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder>( status_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); status_ = null; } return statusBuilder_; } private java.util.List failedRequests_ = java.util.Collections.emptyList(); private void ensureFailedRequestsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { failedRequests_ = new java.util.ArrayList(failedRequests_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> failedRequestsBuilder_; /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsList() { if (failedRequestsBuilder_ == null) { return java.util.Collections.unmodifiableList(failedRequests_); } else { return failedRequestsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public int getFailedRequestsCount() { if (failedRequestsBuilder_ == null) { return failedRequests_.size(); } else { return failedRequestsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) { if 
(failedRequestsBuilder_ == null) { return failedRequests_.get(index); } else { return failedRequestsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder setFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.set(index, value); onChanged(); } else { failedRequestsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder setFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.set(index, builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addFailedRequests(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.add(value); onChanged(); } else { failedRequestsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.add(index, value); onChanged(); } else { failedRequestsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto 
failed_requests = 2; */ public Builder addFailedRequests( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.add(builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.add(index, builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addAllFailedRequests( java.lang.Iterable values) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, failedRequests_); onChanged(); } else { failedRequestsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder clearFailedRequests() { if (failedRequestsBuilder_ == null) { failedRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { failedRequestsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder removeFailedRequests(int index) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.remove(index); onChanged(); } else { failedRequestsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder getFailedRequestsBuilder( int index) { return getFailedRequestsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder( int index) { if (failedRequestsBuilder_ == null) { return failedRequests_.get(index); } else { return failedRequestsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsOrBuilderList() { if (failedRequestsBuilder_ != null) { return failedRequestsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(failedRequests_); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder() { return getFailedRequestsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder( int index) { return getFailedRequestsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsBuilderList() { return getFailedRequestsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> getFailedRequestsFieldBuilder() { if (failedRequestsBuilder_ == null) { failedRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder>( failedRequests_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); failedRequests_ = null; } return failedRequestsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetContainerStatusesResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetContainerStatusesResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetContainerStatusesResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
// NOTE(review): this file is generated by the protocol buffer compiler
// ("DO NOT EDIT" per the file header); any real fix belongs in
// yarn_service_protos.proto, followed by regeneration with protoc.
// NOTE(review): generic type parameters (e.g. Parser<T>, List<T>) appear to
// have been stripped from this copy by HTML extraction — confirm against the
// originally generated source before attempting to compile.
// Tail of the anonymous PARSER of GetContainerStatusesResponseProto: the
// throws clause of parsePartialFrom ends here; the body delegates to the
// message's (CodedInputStream, ExtensionRegistryLite) constructor.
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return new GetContainerStatusesResponseProto(input, extensionRegistry);
    }
  };

  // Static accessor for the singleton parser instance.
  public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

  // end of GetContainerStatusesResponseProto
  }

  /**
   * Read-only accessor contract for the
   * {@code hadoop.yarn.IncreaseContainersResourceRequestProto} message:
   * exposes the repeated {@code increase_containers} token field as a list,
   * by index, by count, and through *OrBuilder views.
   */
  public interface IncreaseContainersResourceRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.IncreaseContainersResourceRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * repeated .hadoop.common.TokenProto increase_containers = 1;
     */
    java.util.List getIncreaseContainersList();
    /**
     * repeated .hadoop.common.TokenProto increase_containers = 1;
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProto getIncreaseContainers(int index);
    /**
     * repeated .hadoop.common.TokenProto increase_containers = 1;
     */
    int getIncreaseContainersCount();
    /**
     * repeated .hadoop.common.TokenProto increase_containers = 1;
     */
    java.util.List getIncreaseContainersOrBuilderList();
    /**
     * repeated .hadoop.common.TokenProto increase_containers = 1;
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getIncreaseContainersOrBuilder(
        int index);
  }

  /**
   * Protobuf type {@code hadoop.yarn.IncreaseContainersResourceRequestProto}
   */
  public static final class IncreaseContainersResourceRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.IncreaseContainersResourceRequestProto)
      IncreaseContainersResourceRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use IncreaseContainersResourceRequestProto.newBuilder() to construct.
private IncreaseContainersResourceRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private IncreaseContainersResourceRequestProto() { increaseContainers_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private IncreaseContainersResourceRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { increaseContainers_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } increaseContainers_.add( input.readMessage(org.apache.hadoop.security.proto.SecurityProtos.TokenProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { increaseContainers_ = java.util.Collections.unmodifiableList(increaseContainers_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor 
/*
 * Generated protobuf code (protoc) for message hadoop.yarn.IncreaseContainersResourceRequestProto.
 * DO NOT hand-edit; regenerate from yarn_service_protos.proto instead.
 * This span contains the message-side plumbing: descriptor / field-accessor-table lookup,
 * accessors for the repeated `increase_containers` field (field #1, hadoop.common.TokenProto),
 * memoized initialization checking, wire serialization, equals/hashCode, and parseFrom overloads.
 */
getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto.Builder.class); } public static final int INCREASE_CONTAINERS_FIELD_NUMBER = 1; private java.util.List increaseContainers_; /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public java.util.List getIncreaseContainersList() { return increaseContainers_; } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public java.util.List getIncreaseContainersOrBuilderList() { return increaseContainers_; } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public int getIncreaseContainersCount() { return increaseContainers_.size(); } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getIncreaseContainers(int index) { return increaseContainers_.get(index); } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getIncreaseContainersOrBuilder( int index) { return increaseContainers_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getIncreaseContainersCount(); i++) { if (!getIncreaseContainers(i).isInitialized()) { 
/* isInitialized() caches its answer in memoizedIsInitialized (-1 = unknown, 0 = false, 1 = true)
 * and requires every nested TokenProto element to itself be initialized. writeTo() emits each
 * increase_containers element as message field #1 followed by any unknown fields;
 * getSerializedSize() memoizes the computed size in memoizedSize (-1 = not yet computed).
 * equals()/hashCode() follow the standard generated-message pattern: descriptor-seeded hash,
 * per-populated-field mixing with the field number, then the unknown-field set. */
memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < increaseContainers_.size(); i++) { output.writeMessage(1, increaseContainers_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < increaseContainers_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, increaseContainers_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto) obj; if (!getIncreaseContainersList() .equals(other.getIncreaseContainersList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getIncreaseContainersCount() > 0) { hash = (37 * hash) + INCREASE_CONTAINERS_FIELD_NUMBER; hash = (53 * hash) + getIncreaseContainersList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public 
/* parseFrom overloads for ByteBuffer / ByteString / byte[] inputs, each with and without an
 * ExtensionRegistryLite; all delegate to the static PARSER defined later in this class. */
static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom( java.io.InputStream input, 
/* Stream-based parse helpers (plain, delimited, and CodedInputStream variants) plus the
 * newBuilder()/newBuilder(prototype) factories, all built on DEFAULT_INSTANCE. */
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder 
/*
 * Generated protobuf Builder for hadoop.yarn.IncreaseContainersResourceRequestProto,
 * followed by the message's DEFAULT_INSTANCE / PARSER statics and the opening of the
 * IncreaseContainersResourceResponseProtoOrBuilder interface. DO NOT hand-edit;
 * regenerate from yarn_service_protos.proto instead.
 * The Builder keeps the repeated increase_containers field in one of two modes:
 * a plain java.util.List guarded by bitField0_ bit 0x00000001 (set = list is mutable),
 * or a RepeatedFieldBuilderV3 (increaseContainersBuilder_) once nested builders are requested.
 */
toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.IncreaseContainersResourceRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.IncreaseContainersResourceRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getIncreaseContainersFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if 
/* clear() resets the repeated field in whichever mode is active; buildPartial() freezes the
 * plain list via Collections.unmodifiableList (clearing the mutability bit) or delegates to the
 * field builder. The setField/clearField/clearOneof/setRepeatedField/addRepeatedField overrides
 * simply forward to the GeneratedMessageV3.Builder superclass. */
(increaseContainersBuilder_ == null) { increaseContainers_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { increaseContainersBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto(this); int from_bitField0_ = bitField0_; if (increaseContainersBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { increaseContainers_ = java.util.Collections.unmodifiableList(increaseContainers_); bitField0_ = (bitField0_ & ~0x00000001); } result.increaseContainers_ = increaseContainers_; } else { result.increaseContainers_ = increaseContainersBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, 
/* mergeFrom(Message) dispatches to the typed overload; the typed mergeFrom either appends the
 * other message's list (plain-list mode) or hands the elements to the field builder, taking
 * ownership of the other list directly when this builder's list is still empty. */
value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto.getDefaultInstance()) return this; if (increaseContainersBuilder_ == null) { if (!other.increaseContainers_.isEmpty()) { if (increaseContainers_.isEmpty()) { increaseContainers_ = other.increaseContainers_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureIncreaseContainersIsMutable(); increaseContainers_.addAll(other.increaseContainers_); } onChanged(); } } else { if (!other.increaseContainers_.isEmpty()) { if (increaseContainersBuilder_.isEmpty()) { increaseContainersBuilder_.dispose(); increaseContainersBuilder_ = null; increaseContainers_ = other.increaseContainers_; bitField0_ = (bitField0_ & ~0x00000001); increaseContainersBuilder_ = 
/* mergeFrom(CodedInputStream) parses a full message via PARSER and merges it in, preserving any
 * partially-parsed message from the finally block on InvalidProtocolBufferException.
 * ensureIncreaseContainersIsMutable() copies the list on first write and sets the 0x1 bit. */
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getIncreaseContainersFieldBuilder() : null; } else { increaseContainersBuilder_.addAllMessages(other.increaseContainers_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getIncreaseContainersCount(); i++) { if (!getIncreaseContainers(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List increaseContainers_ = java.util.Collections.emptyList(); private void ensureIncreaseContainersIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { increaseContainers_ = new java.util.ArrayList(increaseContainers_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> increaseContainersBuilder_; /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public java.util.List getIncreaseContainersList() { if (increaseContainersBuilder_ == null) { return 
/* Generated accessor/mutator suite for the repeated field: each method branches on whether the
 * RepeatedFieldBuilderV3 is active, with null-checks on message values and onChanged()
 * notifications after plain-list mutations. */
java.util.Collections.unmodifiableList(increaseContainers_); } else { return increaseContainersBuilder_.getMessageList(); } } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public int getIncreaseContainersCount() { if (increaseContainersBuilder_ == null) { return increaseContainers_.size(); } else { return increaseContainersBuilder_.getCount(); } } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getIncreaseContainers(int index) { if (increaseContainersBuilder_ == null) { return increaseContainers_.get(index); } else { return increaseContainersBuilder_.getMessage(index); } } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public Builder setIncreaseContainers( int index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (increaseContainersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureIncreaseContainersIsMutable(); increaseContainers_.set(index, value); onChanged(); } else { increaseContainersBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public Builder setIncreaseContainers( int index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) { if (increaseContainersBuilder_ == null) { ensureIncreaseContainersIsMutable(); increaseContainers_.set(index, builderForValue.build()); onChanged(); } else { increaseContainersBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public Builder addIncreaseContainers(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (increaseContainersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureIncreaseContainersIsMutable(); increaseContainers_.add(value); onChanged(); } else { 
increaseContainersBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public Builder addIncreaseContainers( int index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (increaseContainersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureIncreaseContainersIsMutable(); increaseContainers_.add(index, value); onChanged(); } else { increaseContainersBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public Builder addIncreaseContainers( org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) { if (increaseContainersBuilder_ == null) { ensureIncreaseContainersIsMutable(); increaseContainers_.add(builderForValue.build()); onChanged(); } else { increaseContainersBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public Builder addIncreaseContainers( int index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) { if (increaseContainersBuilder_ == null) { ensureIncreaseContainersIsMutable(); increaseContainers_.add(index, builderForValue.build()); onChanged(); } else { increaseContainersBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public Builder addAllIncreaseContainers( java.lang.Iterable values) { if (increaseContainersBuilder_ == null) { ensureIncreaseContainersIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, increaseContainers_); onChanged(); } else { increaseContainersBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public Builder clearIncreaseContainers() { if (increaseContainersBuilder_ == null) { increaseContainers_ = 
java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { increaseContainersBuilder_.clear(); } return this; } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public Builder removeIncreaseContainers(int index) { if (increaseContainersBuilder_ == null) { ensureIncreaseContainersIsMutable(); increaseContainers_.remove(index); onChanged(); } else { increaseContainersBuilder_.remove(index); } return this; } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getIncreaseContainersBuilder( int index) { return getIncreaseContainersFieldBuilder().getBuilder(index); } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getIncreaseContainersOrBuilder( int index) { if (increaseContainersBuilder_ == null) { return increaseContainers_.get(index); } else { return increaseContainersBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public java.util.List getIncreaseContainersOrBuilderList() { if (increaseContainersBuilder_ != null) { return increaseContainersBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(increaseContainers_); } } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder addIncreaseContainersBuilder() { return getIncreaseContainersFieldBuilder().addBuilder( org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()); } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder addIncreaseContainersBuilder( int index) { return getIncreaseContainersFieldBuilder().addBuilder( index, 
/* getIncreaseContainersFieldBuilder() lazily switches the Builder into field-builder mode,
 * handing the current list (and its mutability bit) to a new RepeatedFieldBuilderV3 and nulling
 * the plain list. The Builder class closes here, followed by the lazily-shared DEFAULT_INSTANCE. */
org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()); } /** * repeated .hadoop.common.TokenProto increase_containers = 1; */ public java.util.List getIncreaseContainersBuilderList() { return getIncreaseContainersFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> getIncreaseContainersFieldBuilder() { if (increaseContainersBuilder_ == null) { increaseContainersBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>( increaseContainers_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); increaseContainers_ = null; } return increaseContainersBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.IncreaseContainersResourceRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.IncreaseContainersResourceRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto 
/* The anonymous AbstractParser delegates to the message's parsing constructor; the PARSER field
 * is @Deprecated in favor of parser(). After the request message closes, the OrBuilder interface
 * for hadoop.yarn.IncreaseContainersResourceResponseProto begins (succeeded_requests = 1,
 * failed_requests = 2). */
getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public IncreaseContainersResourceRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new IncreaseContainersResourceRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface IncreaseContainersResourceResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.IncreaseContainersResourceResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ java.util.List getSucceededRequestsList(); /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index); /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ int getSucceededRequestsCount(); /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ java.util.List getSucceededRequestsOrBuilderList(); /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder( int index); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ java.util.List getFailedRequestsList(); 
/*
 * Generated protobuf code (protoc) for message hadoop.yarn.IncreaseContainersResourceResponseProto.
 * DO NOT hand-edit; regenerate from yarn_service_protos.proto instead.
 * This span finishes the OrBuilder interface, then opens the message class: constructors, the
 * CodedInputStream parsing constructor for the two repeated fields
 * (succeeded_requests = 1: ContainerIdProto; failed_requests = 2: ContainerExceptionMapProto),
 * field accessors, serialization, equals/hashCode, parseFrom overloads, and the start of its
 * Builder. NOTE(review): the class continues past the end of this span.
 */
/** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ int getFailedRequestsCount(); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ java.util.List getFailedRequestsOrBuilderList(); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.IncreaseContainersResourceResponseProto} */ public static final class IncreaseContainersResourceResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.IncreaseContainersResourceResponseProto) IncreaseContainersResourceResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use IncreaseContainersResourceResponseProto.newBuilder() to construct. 
/* Parsing constructor: reads tags until EOF (tag 0); tag 10 appends a ContainerIdProto to
 * succeededRequests_, tag 18 appends a ContainerExceptionMapProto to failedRequests_; each list
 * is lazily allocated and tracked via mutable_bitField0_ bits 0x1 / 0x2; unrecognized tags go to
 * the UnknownFieldSet. */
private IncreaseContainersResourceResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private IncreaseContainersResourceResponseProto() { succeededRequests_ = java.util.Collections.emptyList(); failedRequests_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private IncreaseContainersResourceResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { succeededRequests_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } succeededRequests_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry)); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) != 0)) { failedRequests_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } failedRequests_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
/* The finally block freezes both lists as unmodifiable and builds the unknown-field set even on
 * failure, attaching the partially-parsed message to the thrown exception. Below: descriptor
 * plumbing and plain accessors for succeeded_requests. */
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { succeededRequests_ = java.util.Collections.unmodifiableList(succeededRequests_); } if (((mutable_bitField0_ & 0x00000002) != 0)) { failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto.Builder.class); } public static final int SUCCEEDED_REQUESTS_FIELD_NUMBER = 1; private java.util.List succeededRequests_; /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public java.util.List getSucceededRequestsList() { return succeededRequests_; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public java.util.List getSucceededRequestsOrBuilderList() { return succeededRequests_; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public int getSucceededRequestsCount() { return succeededRequests_.size(); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index) { return succeededRequests_.get(index); } /** * repeated 
/* failed_requests accessors, then serialization: unlike the request proto, isInitialized() here
 * has no per-element requirement (both nested types carry no required fields to check at this
 * level); writeTo/getSerializedSize emit succeeded_requests as field #1 and failed_requests as
 * field #2. */
.hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder( int index) { return succeededRequests_.get(index); } public static final int FAILED_REQUESTS_FIELD_NUMBER = 2; private java.util.List failedRequests_; /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsList() { return failedRequests_; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsOrBuilderList() { return failedRequests_; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public int getFailedRequestsCount() { return failedRequests_.size(); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) { return failedRequests_.get(index); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder( int index) { return failedRequests_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < succeededRequests_.size(); i++) { output.writeMessage(1, succeededRequests_.get(i)); } for (int i = 0; i < failedRequests_.size(); i++) { output.writeMessage(2, failedRequests_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size 
= 0; for (int i = 0; i < succeededRequests_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, succeededRequests_.get(i)); } for (int i = 0; i < failedRequests_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, failedRequests_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto) obj; if (!getSucceededRequestsList() .equals(other.getSucceededRequestsList())) return false; if (!getFailedRequestsList() .equals(other.getFailedRequestsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getSucceededRequestsCount() > 0) { hash = (37 * hash) + SUCCEEDED_REQUESTS_FIELD_NUMBER; hash = (53 * hash) + getSucceededRequestsList().hashCode(); } if (getFailedRequestsCount() > 0) { hash = (37 * hash) + FAILED_REQUESTS_FIELD_NUMBER; hash = (53 * hash) + getFailedRequestsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
/* parseFrom overloads (ByteBuffer / ByteString / byte[] / streams), all delegating to PARSER;
 * same shape as the request proto's overload set above. */
org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom( java.io.InputStream input, 
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public 
/* Start of the response message's Builder (same two-mode repeated-field pattern as the request
 * Builder above); its definition continues beyond this span. */
Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.IncreaseContainersResourceResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.IncreaseContainersResourceResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getSucceededRequestsFieldBuilder(); getFailedRequestsFieldBuilder(); } } 
@java.lang.Override public Builder clear() { super.clear(); if (succeededRequestsBuilder_ == null) { succeededRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { succeededRequestsBuilder_.clear(); } if (failedRequestsBuilder_ == null) { failedRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { failedRequestsBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto(this); int from_bitField0_ = bitField0_; if (succeededRequestsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { succeededRequests_ = java.util.Collections.unmodifiableList(succeededRequests_); bitField0_ = (bitField0_ & ~0x00000001); } result.succeededRequests_ = succeededRequests_; } else { result.succeededRequests_ = succeededRequestsBuilder_.build(); } if (failedRequestsBuilder_ == 
null) { if (((bitField0_ & 0x00000002) != 0)) { failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_); bitField0_ = (bitField0_ & ~0x00000002); } result.failedRequests_ = failedRequests_; } else { result.failedRequests_ = failedRequestsBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto.getDefaultInstance()) return this; if (succeededRequestsBuilder_ == null) { if (!other.succeededRequests_.isEmpty()) { 
if (succeededRequests_.isEmpty()) { succeededRequests_ = other.succeededRequests_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureSucceededRequestsIsMutable(); succeededRequests_.addAll(other.succeededRequests_); } onChanged(); } } else { if (!other.succeededRequests_.isEmpty()) { if (succeededRequestsBuilder_.isEmpty()) { succeededRequestsBuilder_.dispose(); succeededRequestsBuilder_ = null; succeededRequests_ = other.succeededRequests_; bitField0_ = (bitField0_ & ~0x00000001); succeededRequestsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getSucceededRequestsFieldBuilder() : null; } else { succeededRequestsBuilder_.addAllMessages(other.succeededRequests_); } } } if (failedRequestsBuilder_ == null) { if (!other.failedRequests_.isEmpty()) { if (failedRequests_.isEmpty()) { failedRequests_ = other.failedRequests_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureFailedRequestsIsMutable(); failedRequests_.addAll(other.failedRequests_); } onChanged(); } } else { if (!other.failedRequests_.isEmpty()) { if (failedRequestsBuilder_.isEmpty()) { failedRequestsBuilder_.dispose(); failedRequestsBuilder_ = null; failedRequests_ = other.failedRequests_; bitField0_ = (bitField0_ & ~0x00000002); failedRequestsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getFailedRequestsFieldBuilder() : null; } else { failedRequestsBuilder_.addAllMessages(other.failedRequests_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List succeededRequests_ = java.util.Collections.emptyList(); private void ensureSucceededRequestsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { succeededRequests_ = new java.util.ArrayList(succeededRequests_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> succeededRequestsBuilder_; /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public java.util.List getSucceededRequestsList() { if (succeededRequestsBuilder_ == null) { return java.util.Collections.unmodifiableList(succeededRequests_); } else { return succeededRequestsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public int getSucceededRequestsCount() { if 
(succeededRequestsBuilder_ == null) { return succeededRequests_.size(); } else { return succeededRequestsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index) { if (succeededRequestsBuilder_ == null) { return succeededRequests_.get(index); } else { return succeededRequestsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder setSucceededRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (succeededRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSucceededRequestsIsMutable(); succeededRequests_.set(index, value); onChanged(); } else { succeededRequestsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder setSucceededRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); succeededRequests_.set(index, builderForValue.build()); onChanged(); } else { succeededRequestsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder addSucceededRequests(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (succeededRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSucceededRequestsIsMutable(); succeededRequests_.add(value); onChanged(); } else { succeededRequestsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder addSucceededRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (succeededRequestsBuilder_ == null) { if (value 
== null) { throw new NullPointerException(); } ensureSucceededRequestsIsMutable(); succeededRequests_.add(index, value); onChanged(); } else { succeededRequestsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder addSucceededRequests( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); succeededRequests_.add(builderForValue.build()); onChanged(); } else { succeededRequestsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder addSucceededRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); succeededRequests_.add(index, builderForValue.build()); onChanged(); } else { succeededRequestsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder addAllSucceededRequests( java.lang.Iterable values) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, succeededRequests_); onChanged(); } else { succeededRequestsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder clearSucceededRequests() { if (succeededRequestsBuilder_ == null) { succeededRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { succeededRequestsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder removeSucceededRequests(int index) { if (succeededRequestsBuilder_ == null) { 
ensureSucceededRequestsIsMutable(); succeededRequests_.remove(index); onChanged(); } else { succeededRequestsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getSucceededRequestsBuilder( int index) { return getSucceededRequestsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder( int index) { if (succeededRequestsBuilder_ == null) { return succeededRequests_.get(index); } else { return succeededRequestsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public java.util.List getSucceededRequestsOrBuilderList() { if (succeededRequestsBuilder_ != null) { return succeededRequestsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(succeededRequests_); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addSucceededRequestsBuilder() { return getSucceededRequestsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addSucceededRequestsBuilder( int index) { return getSucceededRequestsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public java.util.List getSucceededRequestsBuilderList() { return getSucceededRequestsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getSucceededRequestsFieldBuilder() { if (succeededRequestsBuilder_ == null) { succeededRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( succeededRequests_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); succeededRequests_ = null; } return succeededRequestsBuilder_; } private java.util.List failedRequests_ = java.util.Collections.emptyList(); private void ensureFailedRequestsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { failedRequests_ = new java.util.ArrayList(failedRequests_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> failedRequestsBuilder_; /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsList() { if (failedRequestsBuilder_ == null) { return java.util.Collections.unmodifiableList(failedRequests_); } else { return failedRequestsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public int getFailedRequestsCount() { if (failedRequestsBuilder_ == null) { return failedRequests_.size(); } else { return failedRequestsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto 
getFailedRequests(int index) { if (failedRequestsBuilder_ == null) { return failedRequests_.get(index); } else { return failedRequestsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder setFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.set(index, value); onChanged(); } else { failedRequestsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder setFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.set(index, builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addFailedRequests(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.add(value); onChanged(); } else { failedRequestsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.add(index, value); onChanged(); } else { failedRequestsBuilder_.addMessage(index, value); } return this; } /** * repeated 
.hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addFailedRequests( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.add(builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.add(index, builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addAllFailedRequests( java.lang.Iterable values) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, failedRequests_); onChanged(); } else { failedRequestsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder clearFailedRequests() { if (failedRequestsBuilder_ == null) { failedRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { failedRequestsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder removeFailedRequests(int index) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.remove(index); onChanged(); } else { failedRequestsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder getFailedRequestsBuilder( int index) { return getFailedRequestsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder( int index) { if (failedRequestsBuilder_ == null) { return failedRequests_.get(index); } else { return failedRequestsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsOrBuilderList() { if (failedRequestsBuilder_ != null) { return failedRequestsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(failedRequests_); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder() { return getFailedRequestsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder( int index) { return getFailedRequestsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsBuilderList() { return getFailedRequestsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> getFailedRequestsFieldBuilder() { if (failedRequestsBuilder_ == null) { failedRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder>( failedRequests_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); failedRequests_ = null; } return failedRequestsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.IncreaseContainersResourceResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.IncreaseContainersResourceResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public IncreaseContainersResourceResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new IncreaseContainersResourceResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ContainerUpdateRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerUpdateRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ java.util.List getUpdateContainerTokenList(); /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ org.apache.hadoop.security.proto.SecurityProtos.TokenProto getUpdateContainerToken(int index); /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ int getUpdateContainerTokenCount(); /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ java.util.List getUpdateContainerTokenOrBuilderList(); /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getUpdateContainerTokenOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.ContainerUpdateRequestProto} */ public static final class ContainerUpdateRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerUpdateRequestProto) ContainerUpdateRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ContainerUpdateRequestProto.newBuilder() to construct. 
// ContainerUpdateRequestProto message core (generated; do not hand-edit).
// Constructors: builder-based, no-arg (initializes the repeated field to an immutable empty
// list), and the deprecated wire-format parsing constructor. The parsing loop reads tag 10
// (field 1, length-delimited) as TokenProto messages into a lazily-created ArrayList tracked by
// mutable_bitField0_ bit 0x00000001; unrecognized tags go to the UnknownFieldSet; the finally
// block seals the list with Collections.unmodifiableList so the built message is immutable.
private ContainerUpdateRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ContainerUpdateRequestProto() { updateContainerToken_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ContainerUpdateRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { updateContainerToken_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } updateContainerToken_.add( input.readMessage(org.apache.hadoop.security.proto.SecurityProtos.TokenProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { updateContainerToken_ = java.util.Collections.unmodifiableList(updateContainerToken_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return 
// Descriptor/FieldAccessorTable wiring, then read-only accessors for the repeated
// update_container_token field (list view, OrBuilder list view, count, indexed get). The
// accessors return the internal list directly; it was made unmodifiable at parse/build time.
// isInitialized caches its result in memoizedIsInitialized and fails if any element TokenProto
// is itself uninitialized (TokenProto presumably has required fields -- not visible here).
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto.Builder.class); } public static final int UPDATE_CONTAINER_TOKEN_FIELD_NUMBER = 1; private java.util.List updateContainerToken_; /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public java.util.List getUpdateContainerTokenList() { return updateContainerToken_; } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public java.util.List getUpdateContainerTokenOrBuilderList() { return updateContainerToken_; } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public int getUpdateContainerTokenCount() { return updateContainerToken_.size(); } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getUpdateContainerToken(int index) { return updateContainerToken_.get(index); } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getUpdateContainerTokenOrBuilder( int index) { return updateContainerToken_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getUpdateContainerTokenCount(); i++) { if (!getUpdateContainerToken(i).isInitialized()) { memoizedIsInitialized = 0; 
// Serialization: writeTo emits each token as field 1; getSerializedSize sums per-element sizes
// plus unknown fields, memoized in memoizedSize. equals compares the token list and unknown
// fields; hashCode folds the field number and list hash, memoized in memoizedHashCode (standard
// generated 41/19/37/53/29 hash recipe).
return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < updateContainerToken_.size(); i++) { output.writeMessage(1, updateContainerToken_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < updateContainerToken_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, updateContainerToken_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto) obj; if (!getUpdateContainerTokenList() .equals(other.getUpdateContainerTokenList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getUpdateContainerTokenCount() > 0) { hash = (37 * hash) + UPDATE_CONTAINER_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getUpdateContainerTokenList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
// Standard generated parseFrom overloads: ByteBuffer / ByteString / byte[] variants delegate to
// PARSER; stream variants delegate to GeneratedMessageV3.parseWithIOException.
org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
// Delimited/CodedInputStream parseFrom variants and the newBuilder/toBuilder entry points
// (toBuilder returns a fresh Builder for the default instance, otherwise mergeFrom(this)).
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
// ContainerUpdateRequestProto.Builder (generated; do not hand-edit). Manages the single
// repeated update_container_token field two ways: a plain java.util List guarded by bitField0_
// bit 0x00000001 (set = list is locally mutable), or a RepeatedFieldBuilderV3 once nested
// builders are requested. maybeForceBuilderInitialization eagerly creates the field builder
// only when alwaysUseFieldBuilders is on; clear() resets whichever representation is active.
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ContainerUpdateRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerUpdateRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUpdateContainerTokenFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (updateContainerTokenBuilder_ == null) { updateContainerToken_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & 
// buildPartial transfers ownership of the list into the message: if the mutable bit is set the
// list is frozen with unmodifiableList and the bit cleared, so further builder mutations copy
// first (see ensureUpdateContainerTokenIsMutable). The setField/clearField/clearOneof/
// setRepeatedField/addRepeatedField overrides are plain super delegations.
~0x00000001); } else { updateContainerTokenBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto(this); int from_bitField0_ = bitField0_; if (updateContainerTokenBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { updateContainerToken_ = java.util.Collections.unmodifiableList(updateContainerToken_); bitField0_ = (bitField0_ & ~0x00000001); } result.updateContainerToken_ = updateContainerToken_; } else { result.updateContainerToken_ = updateContainerTokenBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public 
// mergeFrom(Message) downcasts when possible; mergeFrom(ContainerUpdateRequestProto) merges the
// other message's token list. In plain-list mode an empty local list aliases the other's
// (immutable) list to avoid a copy; in field-builder mode an empty builder is disposed and
// rebuilt over the other's list (the alwaysUseFieldBuilders ternary continues on the next line).
Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto.getDefaultInstance()) return this; if (updateContainerTokenBuilder_ == null) { if (!other.updateContainerToken_.isEmpty()) { if (updateContainerToken_.isEmpty()) { updateContainerToken_ = other.updateContainerToken_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureUpdateContainerTokenIsMutable(); updateContainerToken_.addAll(other.updateContainerToken_); } onChanged(); } } else { if (!other.updateContainerToken_.isEmpty()) { if (updateContainerTokenBuilder_.isEmpty()) { updateContainerTokenBuilder_.dispose(); updateContainerTokenBuilder_ = null; updateContainerToken_ = other.updateContainerToken_; bitField0_ = (bitField0_ & ~0x00000001); updateContainerTokenBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
// Builder.isInitialized mirrors the message's per-element TokenProto check. mergeFrom(stream)
// parses via PARSER and, even on InvalidProtocolBufferException, merges whatever partial
// message was recovered (finally block) before rethrowing as an unwrapped IOException.
// ensureUpdateContainerTokenIsMutable copies the list on first mutation (bit 0x00000001).
getUpdateContainerTokenFieldBuilder() : null; } else { updateContainerTokenBuilder_.addAllMessages(other.updateContainerToken_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getUpdateContainerTokenCount(); i++) { if (!getUpdateContainerToken(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List updateContainerToken_ = java.util.Collections.emptyList(); private void ensureUpdateContainerTokenIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { updateContainerToken_ = new java.util.ArrayList(updateContainerToken_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> updateContainerTokenBuilder_; /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public java.util.List getUpdateContainerTokenList() { if (updateContainerTokenBuilder_ == null) { return java.util.Collections.unmodifiableList(updateContainerToken_); } else { return 
// Repeated-field accessors/mutators: every operation branches on whether the field builder
// exists (delegate to RepeatedFieldBuilderV3) or not (mutate the local list after
// ensureUpdateContainerTokenIsMutable, then onChanged()). Setters null-check message values.
updateContainerTokenBuilder_.getMessageList(); } } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public int getUpdateContainerTokenCount() { if (updateContainerTokenBuilder_ == null) { return updateContainerToken_.size(); } else { return updateContainerTokenBuilder_.getCount(); } } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getUpdateContainerToken(int index) { if (updateContainerTokenBuilder_ == null) { return updateContainerToken_.get(index); } else { return updateContainerTokenBuilder_.getMessage(index); } } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public Builder setUpdateContainerToken( int index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (updateContainerTokenBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUpdateContainerTokenIsMutable(); updateContainerToken_.set(index, value); onChanged(); } else { updateContainerTokenBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public Builder setUpdateContainerToken( int index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) { if (updateContainerTokenBuilder_ == null) { ensureUpdateContainerTokenIsMutable(); updateContainerToken_.set(index, builderForValue.build()); onChanged(); } else { updateContainerTokenBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public Builder addUpdateContainerToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (updateContainerTokenBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUpdateContainerTokenIsMutable(); updateContainerToken_.add(value); onChanged(); } else { updateContainerTokenBuilder_.addMessage(value); } return 
// add (indexed/builder/addAll) variants; addAll uses AbstractMessageLite.Builder.addAll on the
// plain list, or addAllMessages on the field builder.
this; } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public Builder addUpdateContainerToken( int index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (updateContainerTokenBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUpdateContainerTokenIsMutable(); updateContainerToken_.add(index, value); onChanged(); } else { updateContainerTokenBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public Builder addUpdateContainerToken( org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) { if (updateContainerTokenBuilder_ == null) { ensureUpdateContainerTokenIsMutable(); updateContainerToken_.add(builderForValue.build()); onChanged(); } else { updateContainerTokenBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public Builder addUpdateContainerToken( int index, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) { if (updateContainerTokenBuilder_ == null) { ensureUpdateContainerTokenIsMutable(); updateContainerToken_.add(index, builderForValue.build()); onChanged(); } else { updateContainerTokenBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public Builder addAllUpdateContainerToken( java.lang.Iterable values) { if (updateContainerTokenBuilder_ == null) { ensureUpdateContainerTokenIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, updateContainerToken_); onChanged(); } else { updateContainerTokenBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public Builder clearUpdateContainerToken() { if (updateContainerTokenBuilder_ == null) { updateContainerToken_ = 
// clear/remove, nested-builder accessors (getBuilder/addBuilder force creation of the field
// builder), and OrBuilder views. getUpdateContainerTokenOrBuilderList returns live builder
// views when the field builder exists, else an unmodifiable view of the plain list.
java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { updateContainerTokenBuilder_.clear(); } return this; } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public Builder removeUpdateContainerToken(int index) { if (updateContainerTokenBuilder_ == null) { ensureUpdateContainerTokenIsMutable(); updateContainerToken_.remove(index); onChanged(); } else { updateContainerTokenBuilder_.remove(index); } return this; } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getUpdateContainerTokenBuilder( int index) { return getUpdateContainerTokenFieldBuilder().getBuilder(index); } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getUpdateContainerTokenOrBuilder( int index) { if (updateContainerTokenBuilder_ == null) { return updateContainerToken_.get(index); } else { return updateContainerTokenBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public java.util.List getUpdateContainerTokenOrBuilderList() { if (updateContainerTokenBuilder_ != null) { return updateContainerTokenBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(updateContainerToken_); } } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder addUpdateContainerTokenBuilder() { return getUpdateContainerTokenFieldBuilder().addBuilder( org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()); } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder addUpdateContainerTokenBuilder( int index) { return getUpdateContainerTokenFieldBuilder().addBuilder( index, 
// Lazy field-builder creation (hands ownership of the current list to RepeatedFieldBuilderV3
// and nulls the local reference), unknown-field passthroughs, end of Builder and of the
// ContainerUpdateRequestProto class scope, then the message's DEFAULT_INSTANCE singleton.
org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()); } /** * repeated .hadoop.common.TokenProto update_container_token = 1; */ public java.util.List getUpdateContainerTokenBuilderList() { return getUpdateContainerTokenFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> getUpdateContainerTokenFieldBuilder() { if (updateContainerTokenBuilder_ == null) { updateContainerTokenBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>( updateContainerToken_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); updateContainerToken_ = null; } return updateContainerTokenBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerUpdateRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerUpdateRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } 
// ContainerUpdateRequestProto's deprecated anonymous PARSER (delegates to the parsing ctor) and
// singleton accessors close that class; then the ContainerUpdateResponseProtoOrBuilder interface
// declares accessors for two repeated fields: succeeded_requests = 1 (ContainerIdProto) and
// failed_requests = 2 (ContainerExceptionMapProto).
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ContainerUpdateRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new ContainerUpdateRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ContainerUpdateResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerUpdateResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ java.util.List getSucceededRequestsList(); /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index); /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ int getSucceededRequestsCount(); /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ java.util.List getSucceededRequestsOrBuilderList(); /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder( int index); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ java.util.List getFailedRequestsList(); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ 
// Remainder of the OrBuilder interface (failed_requests accessors), then the opening of the
// ContainerUpdateResponseProto message class: constructors (both repeated fields default to
// immutable empty lists) and the start of the deprecated wire-format parsing constructor.
org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ int getFailedRequestsCount(); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ java.util.List getFailedRequestsOrBuilderList(); /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.ContainerUpdateResponseProto} */ public static final class ContainerUpdateResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerUpdateResponseProto) ContainerUpdateResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ContainerUpdateResponseProto.newBuilder() to construct. private ContainerUpdateResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ContainerUpdateResponseProto() { succeededRequests_ = java.util.Collections.emptyList(); failedRequests_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ContainerUpdateResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); 
// ContainerUpdateResponseProto parsing loop: tag 10 -> succeeded_requests (ContainerIdProto,
// mutable bit 0x00000001), tag 18 -> failed_requests (ContainerExceptionMapProto, bit
// 0x00000002); both lists are frozen with unmodifiableList in the finally block. Then the
// descriptor/FieldAccessorTable wiring. NOTE(review): class continues past the end of this
// chunk (Builder etc. not visible here).
switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { succeededRequests_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } succeededRequests_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry)); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) != 0)) { failedRequests_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } failedRequests_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { succeededRequests_ = java.util.Collections.unmodifiableList(succeededRequests_); } if (((mutable_bitField0_ & 0x00000002) != 0)) { failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto.class, 
// Read-only accessors for the two repeated fields (returning the internal, already-immutable
// lists directly), mirroring the ContainerUpdateRequestProto accessor pattern.
org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto.Builder.class); } public static final int SUCCEEDED_REQUESTS_FIELD_NUMBER = 1; private java.util.List succeededRequests_; /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public java.util.List getSucceededRequestsList() { return succeededRequests_; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public java.util.List getSucceededRequestsOrBuilderList() { return succeededRequests_; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public int getSucceededRequestsCount() { return succeededRequests_.size(); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index) { return succeededRequests_.get(index); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder( int index) { return succeededRequests_.get(index); } public static final int FAILED_REQUESTS_FIELD_NUMBER = 2; private java.util.List failedRequests_; /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsList() { return failedRequests_; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsOrBuilderList() { return failedRequests_; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public int getFailedRequestsCount() { return failedRequests_.size(); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) { return failedRequests_.get(index); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public 
// isInitialized here is trivially true after the memoization check (no per-element checks are
// generated for these element types, unlike the request message). writeTo/getSerializedSize
// emit fields 1 and 2 in order; equals/hashCode cover both lists plus unknown fields.
org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder( int index) { return failedRequests_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < succeededRequests_.size(); i++) { output.writeMessage(1, succeededRequests_.get(i)); } for (int i = 0; i < failedRequests_.size(); i++) { output.writeMessage(2, failedRequests_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < succeededRequests_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, succeededRequests_.get(i)); } for (int i = 0; i < failedRequests_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, failedRequests_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto) obj; if (!getSucceededRequestsList() .equals(other.getSucceededRequestsList())) return false; if (!getFailedRequestsList() .equals(other.getFailedRequestsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override 
// hashCode folds both field numbers and list hashes (memoized), followed by the standard
// parseFrom overload family delegating to PARSER / GeneratedMessageV3 helpers.
public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getSucceededRequestsCount() > 0) { hash = (37 * hash) + SUCCEEDED_REQUESTS_FIELD_NUMBER; hash = (53 * hash) + getSucceededRequestsList().hashCode(); } if (getFailedRequestsCount() > 0) { hash = (37 * hash) + FAILED_REQUESTS_FIELD_NUMBER; hash = (53 * hash) + getFailedRequestsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
// Chunk ends mid-overload; the remaining parseFrom variants, newBuilder/toBuilder, the Builder
// class, and DEFAULT_INSTANCE/PARSER follow beyond this view.
org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parseFrom( 
// NOTE(review): protoc-generated code — do not hand-modify; regenerate from the .proto.
//
// Final parseFrom(CodedInputStream, registry) overload, the newBuilder()/toBuilder()
// plumbing, and the start of the Builder for ContainerUpdateResponseProto:
//  - Builder state uses bitField0_ bit 0x1 for succeeded_requests and 0x2 for failed_requests;
//    each repeated field is held either as a plain List or (once builders are requested)
//    in a RepeatedFieldBuilderV3 — exactly one of the two is active at a time.
//  - clear() resets both fields whichever representation is active.
//  - buildPartial() freezes plain lists via Collections.unmodifiableList and clears the
//    "mutable" bit so the builder list is not aliased mutably by the built message.
//  - The setField/clearField/clearOneof/setRepeatedField/addRepeatedField overrides are
//    pure delegations to GeneratedMessageV3.Builder (reflective field access).
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ContainerUpdateResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerUpdateResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto.class, 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getSucceededRequestsFieldBuilder(); getFailedRequestsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (succeededRequestsBuilder_ == null) { succeededRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { succeededRequestsBuilder_.clear(); } if (failedRequestsBuilder_ == null) { failedRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { failedRequestsBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerUpdateResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto buildPartial() { 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto(this); int from_bitField0_ = bitField0_; if (succeededRequestsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { succeededRequests_ = java.util.Collections.unmodifiableList(succeededRequests_); bitField0_ = (bitField0_ & ~0x00000001); } result.succeededRequests_ = succeededRequests_; } else { result.succeededRequests_ = succeededRequestsBuilder_.build(); } if (failedRequestsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_); bitField0_ = (bitField0_ & ~0x00000002); } result.failedRequests_ = failedRequests_; } else { result.failedRequests_ = failedRequestsBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof 
// NOTE(review): protoc-generated code — do not hand-modify; regenerate from the .proto.
//
// Builder merge logic:
//  - mergeFrom(Message): downcasts to the typed overload when possible, else delegates
//    to the reflective super implementation.
//  - mergeFrom(ContainerUpdateResponseProto): for each repeated field, either appends to
//    the plain list (adopting the other message's immutable list directly when this
//    builder's list is empty — safe because built messages freeze their lists) or routes
//    through the RepeatedFieldBuilderV3, disposing and re-creating it when it was empty.
//  - mergeFrom(CodedInputStream): parses via PARSER; on InvalidProtocolBufferException the
//    partially parsed message is still merged in the finally block before rethrowing as IO.
//  - isInitialized() is always true (no required fields in this message).
// Also begins the succeededRequests_ builder-side storage (bit 0x00000001 = list mutable).
org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto.getDefaultInstance()) return this; if (succeededRequestsBuilder_ == null) { if (!other.succeededRequests_.isEmpty()) { if (succeededRequests_.isEmpty()) { succeededRequests_ = other.succeededRequests_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureSucceededRequestsIsMutable(); succeededRequests_.addAll(other.succeededRequests_); } onChanged(); } } else { if (!other.succeededRequests_.isEmpty()) { if (succeededRequestsBuilder_.isEmpty()) { succeededRequestsBuilder_.dispose(); succeededRequestsBuilder_ = null; succeededRequests_ = other.succeededRequests_; bitField0_ = (bitField0_ & ~0x00000001); succeededRequestsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getSucceededRequestsFieldBuilder() : null; } else { succeededRequestsBuilder_.addAllMessages(other.succeededRequests_); } } } if (failedRequestsBuilder_ == null) { if (!other.failedRequests_.isEmpty()) { if (failedRequests_.isEmpty()) { failedRequests_ = other.failedRequests_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureFailedRequestsIsMutable(); failedRequests_.addAll(other.failedRequests_); } onChanged(); } } else { if (!other.failedRequests_.isEmpty()) { if (failedRequestsBuilder_.isEmpty()) { failedRequestsBuilder_.dispose(); failedRequestsBuilder_ = null; failedRequests_ = other.failedRequests_; bitField0_ = (bitField0_ & ~0x00000002); failedRequestsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getFailedRequestsFieldBuilder() : null; } else { failedRequestsBuilder_.addAllMessages(other.failedRequests_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List succeededRequests_ = java.util.Collections.emptyList(); private void ensureSucceededRequestsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { succeededRequests_ = new java.util.ArrayList(succeededRequests_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> succeededRequestsBuilder_; /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public java.util.List getSucceededRequestsList() { if (succeededRequestsBuilder_ == null) { return java.util.Collections.unmodifiableList(succeededRequests_); } else { return succeededRequestsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public int getSucceededRequestsCount() { if (succeededRequestsBuilder_ == null) { 
// NOTE(review): protoc-generated code — do not hand-modify; regenerate from the .proto.
//
// Builder accessor/mutator family for `repeated .hadoop.yarn.ContainerIdProto
// succeeded_requests = 1`. Every method follows the same two-mode pattern: operate on
// the plain list (after ensureSucceededRequestsIsMutable(), followed by onChanged())
// when succeededRequestsBuilder_ is null, otherwise delegate to the
// RepeatedFieldBuilderV3. Message-valued setters/adders null-check their argument;
// *Builder accessors force creation of the field builder via
// getSucceededRequestsFieldBuilder().
return succeededRequests_.size(); } else { return succeededRequestsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getSucceededRequests(int index) { if (succeededRequestsBuilder_ == null) { return succeededRequests_.get(index); } else { return succeededRequestsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder setSucceededRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (succeededRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSucceededRequestsIsMutable(); succeededRequests_.set(index, value); onChanged(); } else { succeededRequestsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder setSucceededRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); succeededRequests_.set(index, builderForValue.build()); onChanged(); } else { succeededRequestsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder addSucceededRequests(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (succeededRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSucceededRequestsIsMutable(); succeededRequests_.add(value); onChanged(); } else { succeededRequestsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder addSucceededRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (succeededRequestsBuilder_ == null) { if (value == null) { throw new 
NullPointerException(); } ensureSucceededRequestsIsMutable(); succeededRequests_.add(index, value); onChanged(); } else { succeededRequestsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder addSucceededRequests( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); succeededRequests_.add(builderForValue.build()); onChanged(); } else { succeededRequestsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder addSucceededRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); succeededRequests_.add(index, builderForValue.build()); onChanged(); } else { succeededRequestsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder addAllSucceededRequests( java.lang.Iterable values) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, succeededRequests_); onChanged(); } else { succeededRequestsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder clearSucceededRequests() { if (succeededRequestsBuilder_ == null) { succeededRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { succeededRequestsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public Builder removeSucceededRequests(int index) { if (succeededRequestsBuilder_ == null) { ensureSucceededRequestsIsMutable(); 
succeededRequests_.remove(index); onChanged(); } else { succeededRequestsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getSucceededRequestsBuilder( int index) { return getSucceededRequestsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getSucceededRequestsOrBuilder( int index) { if (succeededRequestsBuilder_ == null) { return succeededRequests_.get(index); } else { return succeededRequestsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public java.util.List getSucceededRequestsOrBuilderList() { if (succeededRequestsBuilder_ != null) { return succeededRequestsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(succeededRequests_); } } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addSucceededRequestsBuilder() { return getSucceededRequestsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addSucceededRequestsBuilder( int index) { return getSucceededRequestsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerIdProto succeeded_requests = 1; */ public java.util.List getSucceededRequestsBuilderList() { return getSucceededRequestsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, 
// NOTE(review): protoc-generated code — do not hand-modify; regenerate from the .proto.
//
// getSucceededRequestsFieldBuilder(): lazily creates the RepeatedFieldBuilderV3 for
// field 1, handing it the current list (with its mutability flag) and nulling out the
// plain-list reference so the builder becomes the single source of truth.
//
// Then the complete accessor/mutator family for `repeated
// .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2`, mirroring the
// succeeded_requests pattern exactly but keyed on bitField0_ bit 0x00000002 and
// failedRequestsBuilder_.
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getSucceededRequestsFieldBuilder() { if (succeededRequestsBuilder_ == null) { succeededRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( succeededRequests_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); succeededRequests_ = null; } return succeededRequestsBuilder_; } private java.util.List failedRequests_ = java.util.Collections.emptyList(); private void ensureFailedRequestsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { failedRequests_ = new java.util.ArrayList(failedRequests_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> failedRequestsBuilder_; /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsList() { if (failedRequestsBuilder_ == null) { return java.util.Collections.unmodifiableList(failedRequests_); } else { return failedRequestsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public int getFailedRequestsCount() { if (failedRequestsBuilder_ == null) { return failedRequests_.size(); } else { return failedRequestsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) { if (failedRequestsBuilder_ == null) 
{ return failedRequests_.get(index); } else { return failedRequestsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder setFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.set(index, value); onChanged(); } else { failedRequestsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder setFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.set(index, builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addFailedRequests(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.add(value); onChanged(); } else { failedRequestsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.add(index, value); onChanged(); } else { failedRequestsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder 
addFailedRequests( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.add(builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.add(index, builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addAllFailedRequests( java.lang.Iterable values) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, failedRequests_); onChanged(); } else { failedRequestsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder clearFailedRequests() { if (failedRequestsBuilder_ == null) { failedRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { failedRequestsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder removeFailedRequests(int index) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.remove(index); onChanged(); } else { failedRequestsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder 
getFailedRequestsBuilder( int index) { return getFailedRequestsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder( int index) { if (failedRequestsBuilder_ == null) { return failedRequests_.get(index); } else { return failedRequestsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsOrBuilderList() { if (failedRequestsBuilder_ != null) { return failedRequestsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(failedRequests_); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder() { return getFailedRequestsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder( int index) { return getFailedRequestsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsBuilderList() { return getFailedRequestsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> 
// NOTE(review): protoc-generated code — do not hand-modify; regenerate from the .proto.
//
// Closing pieces of ContainerUpdateResponseProto:
//  - getFailedRequestsFieldBuilder(): lazy RepeatedFieldBuilderV3 creation for field 2
//    (same handoff pattern as the succeeded_requests builder).
//  - set/mergeUnknownFields delegate to the superclass; end of the Builder class.
//  - DEFAULT_INSTANCE singleton, the @Deprecated public PARSER (AbstractParser whose
//    parsePartialFrom invokes the stream-parsing constructor), parser()/getParserForType().
// Then the start of GetApplicationAttemptReportRequestProto: its OrBuilder interface
// (optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1) and the
// message class declaration; the class body continues below.
getFailedRequestsFieldBuilder() { if (failedRequestsBuilder_ == null) { failedRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder>( failedRequests_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); failedRequests_ = null; } return failedRequestsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerUpdateResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerUpdateResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ContainerUpdateResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new ContainerUpdateResponseProto(input, extensionRegistry); } }; public static 
org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetApplicationAttemptReportRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationAttemptReportRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ boolean hasApplicationAttemptId(); /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId(); /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.GetApplicationAttemptReportRequestProto} */ public static final class GetApplicationAttemptReportRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationAttemptReportRequestProto) GetApplicationAttemptReportRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetApplicationAttemptReportRequestProto.newBuilder() to construct. 
// NOTE(review): protoc-generated message class (DO NOT EDIT by hand — change
// yarn_service_protos.proto and regenerate). Comments below only orient the
// reader through this whitespace-mangled dump; code tokens are unchanged.
// Constructors (builder-based, default, and the wire-format parsing ctor that
// reads field 1, application_attempt_id, as an embedded message; unknown tags
// are preserved in unknownFields).
private GetApplicationAttemptReportRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetApplicationAttemptReportRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetApplicationAttemptReportRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = applicationAttemptId_.toBuilder(); } applicationAttemptId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(applicationAttemptId_); applicationAttemptId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final 
// Descriptor / field-accessor-table plumbing, then the single optional field
// application_attempt_id (field number 1): presence bit in bitField0_,
// has/get accessors returning the default instance when unset.
org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto.Builder.class); } private int bitField0_; public static final int APPLICATION_ATTEMPT_ID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_; /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public boolean hasApplicationAttemptId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() { return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() { return applicationAttemptId_ == null ? 
// Serialization (writeTo / getSerializedSize, both memoized), plus
// value-based equals() and a memoized hashCode() keyed on the descriptor
// and the set field.
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApplicationAttemptId()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getApplicationAttemptId()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto) obj; if (hasApplicationAttemptId() != other.hasApplicationAttemptId()) return false; if (hasApplicationAttemptId()) { if (!getApplicationAttemptId() .equals(other.getApplicationAttemptId())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationAttemptId()) { hash = (37 * hash) + APPLICATION_ATTEMPT_ID_FIELD_NUMBER; hash = (53 * hash) + 
// Static parseFrom overloads for in-memory inputs (ByteBuffer, ByteString,
// byte[]), each with and without an ExtensionRegistryLite.
getApplicationAttemptId().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
// Stream-based parse overloads (InputStream, delimited, CodedInputStream),
// delegating to GeneratedMessageV3 IO helpers.
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, 
// Builder factory methods (newBuilder/toBuilder) and the start of the nested
// Builder class with its own descriptor plumbing.
extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetApplicationAttemptReportRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationAttemptReportRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto.newBuilder() private Builder() { 
// Builder initialization, clear(), and build()/buildPartial() copying the
// application_attempt_id field and its presence bit into the message.
maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationAttemptIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationAttemptIdBuilder_ == null) { applicationAttemptId_ = null; } else { applicationAttemptIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (applicationAttemptIdBuilder_ == null) { result.applicationAttemptId_ = applicationAttemptId_; } else { 
// buildPartial tail, delegating field-manipulation overrides, and
// mergeFrom(Message/typed) merging the optional field and unknown fields.
result.applicationAttemptId_ = applicationAttemptIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto.getDefaultInstance()) return this; if (other.hasApplicationAttemptId()) { mergeApplicationAttemptId(other.getApplicationAttemptId()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean 
// Builder isInitialized / stream mergeFrom (re-merges partial message on
// parse failure), then the builder-side field state and has/get accessors.
isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> applicationAttemptIdBuilder_; /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public boolean hasApplicationAttemptId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() { if (applicationAttemptIdBuilder_ == null) { return applicationAttemptId_ == null ? 
// Builder setters for application_attempt_id: set (message or sub-builder),
// merge (combines with an existing non-default value), and clear.
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_; } else { return applicationAttemptIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public Builder setApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) { if (applicationAttemptIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationAttemptId_ = value; onChanged(); } else { applicationAttemptIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public Builder setApplicationAttemptId( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) { if (applicationAttemptIdBuilder_ == null) { applicationAttemptId_ = builderForValue.build(); onChanged(); } else { applicationAttemptIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public Builder mergeApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) { if (applicationAttemptIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationAttemptId_ != null && applicationAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) { applicationAttemptId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.newBuilder(applicationAttemptId_).mergeFrom(value).buildPartial(); } else { applicationAttemptId_ = value; } onChanged(); } else { applicationAttemptIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public Builder clearApplicationAttemptId() { if (applicationAttemptIdBuilder_ == null) { 
// clear() tail, builder/orBuilder accessors, lazy SingleFieldBuilderV3
// initialization (after which applicationAttemptId_ is tracked by the
// builder, not the raw field), and unknown-field override stubs.
applicationAttemptId_ = null; onChanged(); } else { applicationAttemptIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getApplicationAttemptIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationAttemptIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() { if (applicationAttemptIdBuilder_ != null) { return applicationAttemptIdBuilder_.getMessageOrBuilder(); } else { return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_; } } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> getApplicationAttemptIdFieldBuilder() { if (applicationAttemptIdBuilder_ == null) { applicationAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>( getApplicationAttemptId(), getParentForChildren(), isClean()); applicationAttemptId_ = null; } return applicationAttemptIdBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return 
super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationAttemptReportRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationAttemptReportRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetApplicationAttemptReportRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetApplicationAttemptReportRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetApplicationAttemptReportResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationAttemptReportResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional 
.hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1; */ boolean hasApplicationAttemptReport(); /** * optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getApplicationAttemptReport(); /** * optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder getApplicationAttemptReportOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.GetApplicationAttemptReportResponseProto} */ public static final class GetApplicationAttemptReportResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationAttemptReportResponseProto) GetApplicationAttemptReportResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetApplicationAttemptReportResponseProto.newBuilder() to construct. 
private GetApplicationAttemptReportResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetApplicationAttemptReportResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetApplicationAttemptReportResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = applicationAttemptReport_.toBuilder(); } applicationAttemptReport_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(applicationAttemptReport_); applicationAttemptReport_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final 
org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto.Builder.class); } private int bitField0_; public static final int APPLICATION_ATTEMPT_REPORT_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto applicationAttemptReport_; /** * optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1; */ public boolean hasApplicationAttemptReport() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getApplicationAttemptReport() { return applicationAttemptReport_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance() : applicationAttemptReport_; } /** * optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder getApplicationAttemptReportOrBuilder() { return applicationAttemptReport_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance() : applicationAttemptReport_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApplicationAttemptReport()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getApplicationAttemptReport()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto) obj; if (hasApplicationAttemptReport() != other.hasApplicationAttemptReport()) return false; if (hasApplicationAttemptReport()) { if (!getApplicationAttemptReport() .equals(other.getApplicationAttemptReport())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationAttemptReport()) { hash = (37 * hash) + 
APPLICATION_ATTEMPT_REPORT_FIELD_NUMBER; hash = (53 * hash) + getApplicationAttemptReport().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetApplicationAttemptReportResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationAttemptReportResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto.Builder.class); } // Construct using 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationAttemptReportFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationAttemptReportBuilder_ == null) { applicationAttemptReport_ = null; } else { applicationAttemptReportBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ 
& 0x00000001) != 0)) { if (applicationAttemptReportBuilder_ == null) { result.applicationAttemptReport_ = applicationAttemptReport_; } else { result.applicationAttemptReport_ = applicationAttemptReportBuilder_.build(); } to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto.getDefaultInstance()) return this; if (other.hasApplicationAttemptReport()) { 
mergeApplicationAttemptReport(other.getApplicationAttemptReport()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto applicationAttemptReport_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder> applicationAttemptReportBuilder_; /** * optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1; */ public boolean hasApplicationAttemptReport() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getApplicationAttemptReport() { if (applicationAttemptReportBuilder_ == null) { return applicationAttemptReport_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance() : applicationAttemptReport_; } else { return applicationAttemptReportBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1; */ public Builder setApplicationAttemptReport(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto value) { if (applicationAttemptReportBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationAttemptReport_ = value; onChanged(); } else { applicationAttemptReportBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1; */ public Builder setApplicationAttemptReport( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder builderForValue) { if (applicationAttemptReportBuilder_ == null) { applicationAttemptReport_ = builderForValue.build(); onChanged(); } else { applicationAttemptReportBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1; */ public Builder mergeApplicationAttemptReport(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto value) { if (applicationAttemptReportBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationAttemptReport_ != null && applicationAttemptReport_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance()) { applicationAttemptReport_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.newBuilder(applicationAttemptReport_).mergeFrom(value).buildPartial(); } else { applicationAttemptReport_ = value; } onChanged(); } else { applicationAttemptReportBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationAttemptReportProto 
application_attempt_report = 1; */ public Builder clearApplicationAttemptReport() { if (applicationAttemptReportBuilder_ == null) { applicationAttemptReport_ = null; onChanged(); } else { applicationAttemptReportBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder getApplicationAttemptReportBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationAttemptReportFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder getApplicationAttemptReportOrBuilder() { if (applicationAttemptReportBuilder_ != null) { return applicationAttemptReportBuilder_.getMessageOrBuilder(); } else { return applicationAttemptReport_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance() : applicationAttemptReport_; } } /** * optional .hadoop.yarn.ApplicationAttemptReportProto application_attempt_report = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder> getApplicationAttemptReportFieldBuilder() { if (applicationAttemptReportBuilder_ == null) { applicationAttemptReportBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder>( getApplicationAttemptReport(), getParentForChildren(), isClean()); 
applicationAttemptReport_ = null; } return applicationAttemptReportBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationAttemptReportResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationAttemptReportResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetApplicationAttemptReportResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetApplicationAttemptReportResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface 
GetApplicationAttemptsRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationAttemptsRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ boolean hasApplicationId(); /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId(); /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.GetApplicationAttemptsRequestProto} */ public static final class GetApplicationAttemptsRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationAttemptsRequestProto) GetApplicationAttemptsRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetApplicationAttemptsRequestProto.newBuilder() to construct. 
private GetApplicationAttemptsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetApplicationAttemptsRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetApplicationAttemptsRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = applicationId_.toBuilder(); } applicationId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(applicationId_); applicationId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto.Builder.class); } private int bitField0_; public static final int APPLICATION_ID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { return applicationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApplicationId()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getApplicationId()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto) obj; if (hasApplicationId() != other.hasApplicationId()) return false; if (hasApplicationId()) { if (!getApplicationId() .equals(other.getApplicationId())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationId()) { hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER; hash = (53 * hash) + getApplicationId().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return 
hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public 
static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetApplicationAttemptsRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationAttemptsRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationIdBuilder_ == null) { applicationId_ = null; } else { applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (applicationIdBuilder_ == null) { result.applicationId_ = applicationId_; } else { result.applicationId_ = applicationIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, 
java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto.getDefaultInstance()) return this; if (other.hasApplicationId()) { mergeApplicationId(other.getApplicationId()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto parsedMessage = null; try { parsedMessage = 
PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_; /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { if (applicationIdBuilder_ == null) { return applicationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } else { return applicationIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationId_ = value; onChanged(); } else { applicationIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder setApplicationId( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) { if (applicationIdBuilder_ == null) { applicationId_ = builderForValue.build(); onChanged(); } else { applicationIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationId_ != null && applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) { applicationId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.newBuilder(applicationId_).mergeFrom(value).buildPartial(); } else { applicationId_ = value; } onChanged(); } else { applicationIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder clearApplicationId() { if (applicationIdBuilder_ == null) { applicationId_ = null; onChanged(); } else { applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { if (applicationIdBuilder_ != null) { return applicationIdBuilder_.getMessageOrBuilder(); } else { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> getApplicationIdFieldBuilder() { if (applicationIdBuilder_ == null) { applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>( getApplicationId(), getParentForChildren(), isClean()); applicationId_ = null; } return applicationIdBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationAttemptsRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationAttemptsRequestProto) private static final 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetApplicationAttemptsRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetApplicationAttemptsRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetApplicationAttemptsResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetApplicationAttemptsResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ java.util.List getApplicationAttemptsList(); /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getApplicationAttempts(int index); /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ int getApplicationAttemptsCount(); /** * repeated .hadoop.yarn.ApplicationAttemptReportProto 
application_attempts = 1; */ java.util.List getApplicationAttemptsOrBuilderList(); /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder getApplicationAttemptsOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.GetApplicationAttemptsResponseProto} */ public static final class GetApplicationAttemptsResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetApplicationAttemptsResponseProto) GetApplicationAttemptsResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetApplicationAttemptsResponseProto.newBuilder() to construct. private GetApplicationAttemptsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetApplicationAttemptsResponseProto() { applicationAttempts_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetApplicationAttemptsResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { applicationAttempts_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } applicationAttempts_.add( 
input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { applicationAttempts_ = java.util.Collections.unmodifiableList(applicationAttempts_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto.Builder.class); } public static final int APPLICATION_ATTEMPTS_FIELD_NUMBER = 1; private java.util.List applicationAttempts_; /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public java.util.List getApplicationAttemptsList() { return applicationAttempts_; } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public java.util.List getApplicationAttemptsOrBuilderList() { return applicationAttempts_; } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; 
*/ public int getApplicationAttemptsCount() { return applicationAttempts_.size(); } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getApplicationAttempts(int index) { return applicationAttempts_.get(index); } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder getApplicationAttemptsOrBuilder( int index) { return applicationAttempts_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < applicationAttempts_.size(); i++) { output.writeMessage(1, applicationAttempts_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < applicationAttempts_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, applicationAttempts_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto) obj; if (!getApplicationAttemptsList() .equals(other.getApplicationAttemptsList())) 
return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getApplicationAttemptsCount() > 0) { hash = (37 * hash) + APPLICATION_ATTEMPTS_FIELD_NUMBER; hash = (53 * hash) + getApplicationAttemptsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetApplicationAttemptsResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetApplicationAttemptsResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationAttemptsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationAttemptsBuilder_ == null) { applicationAttempts_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { applicationAttemptsBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto result 
= new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto(this); int from_bitField0_ = bitField0_; if (applicationAttemptsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { applicationAttempts_ = java.util.Collections.unmodifiableList(applicationAttempts_); bitField0_ = (bitField0_ & ~0x00000001); } result.applicationAttempts_ = applicationAttempts_; } else { result.applicationAttempts_ = applicationAttemptsBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto other) { if (other == 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto.getDefaultInstance()) return this; if (applicationAttemptsBuilder_ == null) { if (!other.applicationAttempts_.isEmpty()) { if (applicationAttempts_.isEmpty()) { applicationAttempts_ = other.applicationAttempts_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureApplicationAttemptsIsMutable(); applicationAttempts_.addAll(other.applicationAttempts_); } onChanged(); } } else { if (!other.applicationAttempts_.isEmpty()) { if (applicationAttemptsBuilder_.isEmpty()) { applicationAttemptsBuilder_.dispose(); applicationAttemptsBuilder_ = null; applicationAttempts_ = other.applicationAttempts_; bitField0_ = (bitField0_ & ~0x00000001); applicationAttemptsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getApplicationAttemptsFieldBuilder() : null; } else { applicationAttemptsBuilder_.addAllMessages(other.applicationAttempts_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List applicationAttempts_ = java.util.Collections.emptyList(); private void ensureApplicationAttemptsIsMutable() { if 
(!((bitField0_ & 0x00000001) != 0)) { applicationAttempts_ = new java.util.ArrayList(applicationAttempts_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder> applicationAttemptsBuilder_; /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public java.util.List getApplicationAttemptsList() { if (applicationAttemptsBuilder_ == null) { return java.util.Collections.unmodifiableList(applicationAttempts_); } else { return applicationAttemptsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public int getApplicationAttemptsCount() { if (applicationAttemptsBuilder_ == null) { return applicationAttempts_.size(); } else { return applicationAttemptsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getApplicationAttempts(int index) { if (applicationAttemptsBuilder_ == null) { return applicationAttempts_.get(index); } else { return applicationAttemptsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public Builder setApplicationAttempts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto value) { if (applicationAttemptsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationAttemptsIsMutable(); applicationAttempts_.set(index, value); onChanged(); } else { applicationAttemptsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public Builder 
setApplicationAttempts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder builderForValue) { if (applicationAttemptsBuilder_ == null) { ensureApplicationAttemptsIsMutable(); applicationAttempts_.set(index, builderForValue.build()); onChanged(); } else { applicationAttemptsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public Builder addApplicationAttempts(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto value) { if (applicationAttemptsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationAttemptsIsMutable(); applicationAttempts_.add(value); onChanged(); } else { applicationAttemptsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public Builder addApplicationAttempts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto value) { if (applicationAttemptsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationAttemptsIsMutable(); applicationAttempts_.add(index, value); onChanged(); } else { applicationAttemptsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public Builder addApplicationAttempts( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder builderForValue) { if (applicationAttemptsBuilder_ == null) { ensureApplicationAttemptsIsMutable(); applicationAttempts_.add(builderForValue.build()); onChanged(); } else { applicationAttemptsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public Builder addApplicationAttempts( int index, 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder builderForValue) { if (applicationAttemptsBuilder_ == null) { ensureApplicationAttemptsIsMutable(); applicationAttempts_.add(index, builderForValue.build()); onChanged(); } else { applicationAttemptsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public Builder addAllApplicationAttempts( java.lang.Iterable values) { if (applicationAttemptsBuilder_ == null) { ensureApplicationAttemptsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, applicationAttempts_); onChanged(); } else { applicationAttemptsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public Builder clearApplicationAttempts() { if (applicationAttemptsBuilder_ == null) { applicationAttempts_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { applicationAttemptsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public Builder removeApplicationAttempts(int index) { if (applicationAttemptsBuilder_ == null) { ensureApplicationAttemptsIsMutable(); applicationAttempts_.remove(index); onChanged(); } else { applicationAttemptsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder getApplicationAttemptsBuilder( int index) { return getApplicationAttemptsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder getApplicationAttemptsOrBuilder( int index) { if 
(applicationAttemptsBuilder_ == null) { return applicationAttempts_.get(index); } else { return applicationAttemptsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public java.util.List getApplicationAttemptsOrBuilderList() { if (applicationAttemptsBuilder_ != null) { return applicationAttemptsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(applicationAttempts_); } } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder addApplicationAttemptsBuilder() { return getApplicationAttemptsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder addApplicationAttemptsBuilder( int index) { return getApplicationAttemptsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ApplicationAttemptReportProto application_attempts = 1; */ public java.util.List getApplicationAttemptsBuilderList() { return getApplicationAttemptsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder> getApplicationAttemptsFieldBuilder() { if (applicationAttemptsBuilder_ == null) { applicationAttemptsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto, 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder>( applicationAttempts_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); applicationAttempts_ = null; } return applicationAttemptsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetApplicationAttemptsResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetApplicationAttemptsResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetApplicationAttemptsResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetApplicationAttemptsResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } 
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetContainerReportRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetContainerReportRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ boolean hasContainerId(); /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(); /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.GetContainerReportRequestProto} */ public static final class GetContainerReportRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetContainerReportRequestProto) GetContainerReportRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetContainerReportRequestProto.newBuilder() to construct. 
private GetContainerReportRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetContainerReportRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetContainerReportRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = containerId_.toBuilder(); } containerId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(containerId_); containerId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto.Builder.class); } private int bitField0_; public static final int CONTAINER_ID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_; /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public boolean hasContainerId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { return containerId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getContainerId()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getContainerId()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto) obj; if (hasContainerId() != other.hasContainerId()) return false; if (hasContainerId()) { if (!getContainerId() .equals(other.getContainerId())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasContainerId()) { hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER; hash = (53 * hash) + getContainerId().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetContainerReportRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetContainerReportRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getContainerIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (containerIdBuilder_ == null) { containerId_ = null; } else { containerIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (containerIdBuilder_ == null) { result.containerId_ = containerId_; } else { result.containerId_ = containerIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, 
value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto.getDefaultInstance()) return this; if (other.hasContainerId()) { mergeContainerId(other.getContainerId()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_; /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public boolean hasContainerId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() { if (containerIdBuilder_ == null) { return containerId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } else { return containerIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } containerId_ = value; onChanged(); } else { containerIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (containerIdBuilder_ == null) { containerId_ = builderForValue.build(); onChanged(); } else { containerIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && containerId_ != null && containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) { containerId_ = org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.newBuilder(containerId_).mergeFrom(value).buildPartial(); } else { containerId_ = value; } onChanged(); } else { containerIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder clearContainerId() { if (containerIdBuilder_ == null) { containerId_ = null; onChanged(); } else { containerIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() { bitField0_ |= 0x00000001; 
onChanged(); return getContainerIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { if (containerIdBuilder_ != null) { return containerIdBuilder_.getMessageOrBuilder(); } else { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getContainerIdFieldBuilder() { if (containerIdBuilder_ == null) { containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( getContainerId(), getParentForChildren(), isClean()); containerId_ = null; } return containerIdBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetContainerReportRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetContainerReportRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetContainerReportRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetContainerReportRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetContainerReportResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetContainerReportResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ContainerReportProto container_report = 1; */ boolean hasContainerReport(); /** * optional .hadoop.yarn.ContainerReportProto container_report = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getContainerReport(); /** * optional .hadoop.yarn.ContainerReportProto container_report = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder getContainerReportOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.GetContainerReportResponseProto} */ public static final class GetContainerReportResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // 
@@protoc_insertion_point(message_implements:hadoop.yarn.GetContainerReportResponseProto) GetContainerReportResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetContainerReportResponseProto.newBuilder() to construct. private GetContainerReportResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetContainerReportResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetContainerReportResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = containerReport_.toBuilder(); } containerReport_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(containerReport_); containerReport_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto.Builder.class); } private int bitField0_; public static final int CONTAINER_REPORT_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto containerReport_; /** * optional .hadoop.yarn.ContainerReportProto container_report = 1; */ public boolean hasContainerReport() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerReportProto container_report = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getContainerReport() { return containerReport_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance() : containerReport_; } /** * optional .hadoop.yarn.ContainerReportProto container_report = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder getContainerReportOrBuilder() { return containerReport_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance() : containerReport_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasContainerReport()) { if (!getContainerReport().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getContainerReport()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getContainerReport()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto) obj; if (hasContainerReport() != other.hasContainerReport()) return false; if (hasContainerReport()) { if (!getContainerReport() .equals(other.getContainerReport())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasContainerReport()) { hash = (37 * hash) + CONTAINER_REPORT_FIELD_NUMBER; hash 
= (53 * hash) + getContainerReport().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public 
Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetContainerReportResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetContainerReportResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { 
super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getContainerReportFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (containerReportBuilder_ == null) { containerReport_ = null; } else { containerReportBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainerReportResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (containerReportBuilder_ == null) { result.containerReport_ = containerReport_; } else { result.containerReport_ = containerReportBuilder_.build(); } to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override 
public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto.getDefaultInstance()) return this; if (other.hasContainerReport()) { mergeContainerReport(other.getContainerReport()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasContainerReport()) { if (!getContainerReport().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto containerReport_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder> containerReportBuilder_; /** * optional .hadoop.yarn.ContainerReportProto container_report = 1; */ public boolean hasContainerReport() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerReportProto container_report = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getContainerReport() { if (containerReportBuilder_ == null) { return containerReport_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance() : containerReport_; } else { return containerReportBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ContainerReportProto container_report = 1; */ public Builder setContainerReport(org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto value) { if (containerReportBuilder_ == null) { if (value == null) { throw new NullPointerException(); } containerReport_ = value; onChanged(); } else { containerReportBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerReportProto container_report = 1; */ public Builder setContainerReport( org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder builderForValue) { if (containerReportBuilder_ == null) { containerReport_ = builderForValue.build(); onChanged(); } else { containerReportBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerReportProto container_report = 1; */ public Builder mergeContainerReport(org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto value) { if (containerReportBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && containerReport_ != null && containerReport_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance()) { containerReport_ = org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.newBuilder(containerReport_).mergeFrom(value).buildPartial(); } else { containerReport_ = value; } onChanged(); } else { containerReportBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ContainerReportProto container_report = 1; */ public Builder clearContainerReport() { if (containerReportBuilder_ == null) { containerReport_ = null; onChanged(); } else { containerReportBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ContainerReportProto 
container_report = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder getContainerReportBuilder() { bitField0_ |= 0x00000001; onChanged(); return getContainerReportFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ContainerReportProto container_report = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder getContainerReportOrBuilder() { if (containerReportBuilder_ != null) { return containerReportBuilder_.getMessageOrBuilder(); } else { return containerReport_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance() : containerReport_; } } /** * optional .hadoop.yarn.ContainerReportProto container_report = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder> getContainerReportFieldBuilder() { if (containerReportBuilder_ == null) { containerReportBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder>( getContainerReport(), getParentForChildren(), isClean()); containerReport_ = null; } return containerReportBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetContainerReportResponseProto) } // 
@@protoc_insertion_point(class_scope:hadoop.yarn.GetContainerReportResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetContainerReportResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetContainerReportResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetContainersRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetContainersRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ boolean hasApplicationAttemptId(); /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId(); /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.GetContainersRequestProto} */ public static final class GetContainersRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetContainersRequestProto) GetContainersRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetContainersRequestProto.newBuilder() to construct. private GetContainersRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetContainersRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetContainersRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = applicationAttemptId_.toBuilder(); } applicationAttemptId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(applicationAttemptId_); applicationAttemptId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } default: { if (!parseUnknownField( 
input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto.Builder.class); } private int bitField0_; public static final int APPLICATION_ATTEMPT_ID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_; /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public boolean hasApplicationAttemptId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() { return applicationAttemptId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() { return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApplicationAttemptId()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getApplicationAttemptId()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto) obj; if (hasApplicationAttemptId() != other.hasApplicationAttemptId()) return false; if (hasApplicationAttemptId()) { if (!getApplicationAttemptId() .equals(other.getApplicationAttemptId())) return false; } if (!unknownFields.equals(other.unknownFields)) return 
false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationAttemptId()) { hash = (37 * hash) + APPLICATION_ATTEMPT_ID_FIELD_NUMBER; hash = (53 * hash) + getApplicationAttemptId().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom( byte[] data, 
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetContainersRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetContainersRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto.newBuilder() private 
Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationAttemptIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationAttemptIdBuilder_ == null) { applicationAttemptId_ = null; } else { applicationAttemptIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (applicationAttemptIdBuilder_ == null) { result.applicationAttemptId_ = applicationAttemptId_; } else { result.applicationAttemptId_ = applicationAttemptIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } 
result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto.getDefaultInstance()) return this; if (other.hasApplicationAttemptId()) { mergeApplicationAttemptId(other.getApplicationAttemptId()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, 
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> applicationAttemptIdBuilder_; /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public boolean hasApplicationAttemptId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() { if (applicationAttemptIdBuilder_ == null) { return applicationAttemptId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_; } else { return applicationAttemptIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public Builder setApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) { if (applicationAttemptIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationAttemptId_ = value; onChanged(); } else { applicationAttemptIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public Builder setApplicationAttemptId( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) { if (applicationAttemptIdBuilder_ == null) { applicationAttemptId_ = builderForValue.build(); onChanged(); } else { applicationAttemptIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public Builder mergeApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) { if (applicationAttemptIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationAttemptId_ != null && applicationAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) { applicationAttemptId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.newBuilder(applicationAttemptId_).mergeFrom(value).buildPartial(); } else { applicationAttemptId_ = value; } onChanged(); } else { applicationAttemptIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public Builder clearApplicationAttemptId() { if (applicationAttemptIdBuilder_ == null) { 
applicationAttemptId_ = null; onChanged(); } else { applicationAttemptIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getApplicationAttemptIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationAttemptIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() { if (applicationAttemptIdBuilder_ != null) { return applicationAttemptIdBuilder_.getMessageOrBuilder(); } else { return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_; } } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> getApplicationAttemptIdFieldBuilder() { if (applicationAttemptIdBuilder_ == null) { applicationAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>( getApplicationAttemptId(), getParentForChildren(), isClean()); applicationAttemptId_ = null; } return applicationAttemptIdBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return 
super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetContainersRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetContainersRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetContainersRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetContainersRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetContainersResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetContainersResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ java.util.List getContainersList(); /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getContainers(int index); /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ int getContainersCount(); /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ java.util.List getContainersOrBuilderList(); /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder getContainersOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.GetContainersResponseProto} */ public static final class GetContainersResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetContainersResponseProto) GetContainersResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetContainersResponseProto.newBuilder() to construct. private GetContainersResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetContainersResponseProto() { containers_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetContainersResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { containers_ = new java.util.ArrayList(); 
mutable_bitField0_ |= 0x00000001; } containers_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { containers_ = java.util.Collections.unmodifiableList(containers_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto.Builder.class); } public static final int CONTAINERS_FIELD_NUMBER = 1; private java.util.List containers_; /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public java.util.List getContainersList() { return containers_; } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public java.util.List getContainersOrBuilderList() { return containers_; } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public int getContainersCount() { return containers_.size(); } /** * repeated .hadoop.yarn.ContainerReportProto 
containers = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getContainers(int index) { return containers_.get(index); } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder getContainersOrBuilder( int index) { return containers_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getContainersCount(); i++) { if (!getContainers(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < containers_.size(); i++) { output.writeMessage(1, containers_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < containers_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, containers_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto) obj; if (!getContainersList() .equals(other.getContainersList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return 
memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getContainersCount() > 0) { hash = (37 * hash) + CONTAINERS_FIELD_NUMBER; hash = (53 * hash) + getContainersList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } 
@java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetContainersResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetContainersResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); 
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getContainersFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (containersBuilder_ == null) { containers_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { containersBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetContainersResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto(this); int from_bitField0_ = bitField0_; if (containersBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { containers_ = java.util.Collections.unmodifiableList(containers_); bitField0_ = (bitField0_ & ~0x00000001); } result.containers_ = containers_; } else { result.containers_ = containersBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto.getDefaultInstance()) return this; if (containersBuilder_ == null) { if (!other.containers_.isEmpty()) { if (containers_.isEmpty()) { containers_ = other.containers_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureContainersIsMutable(); containers_.addAll(other.containers_); } onChanged(); } } else { if (!other.containers_.isEmpty()) { if (containersBuilder_.isEmpty()) { containersBuilder_.dispose(); containersBuilder_ = null; containers_ = other.containers_; bitField0_ = (bitField0_ & ~0x00000001); containersBuilder_ = 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getContainersFieldBuilder() : null; } else { containersBuilder_.addAllMessages(other.containers_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getContainersCount(); i++) { if (!getContainers(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List containers_ = java.util.Collections.emptyList(); private void ensureContainersIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { containers_ = new java.util.ArrayList(containers_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder> containersBuilder_; /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public java.util.List getContainersList() { if (containersBuilder_ == null) { return java.util.Collections.unmodifiableList(containers_); } else { return containersBuilder_.getMessageList(); } } /** * repeated 
.hadoop.yarn.ContainerReportProto containers = 1; */ public int getContainersCount() { if (containersBuilder_ == null) { return containers_.size(); } else { return containersBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getContainers(int index) { if (containersBuilder_ == null) { return containers_.get(index); } else { return containersBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public Builder setContainers( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto value) { if (containersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainersIsMutable(); containers_.set(index, value); onChanged(); } else { containersBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public Builder setContainers( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder builderForValue) { if (containersBuilder_ == null) { ensureContainersIsMutable(); containers_.set(index, builderForValue.build()); onChanged(); } else { containersBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public Builder addContainers(org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto value) { if (containersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainersIsMutable(); containers_.add(value); onChanged(); } else { containersBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public Builder addContainers( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto value) { if (containersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainersIsMutable(); 
containers_.add(index, value); onChanged(); } else { containersBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public Builder addContainers( org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder builderForValue) { if (containersBuilder_ == null) { ensureContainersIsMutable(); containers_.add(builderForValue.build()); onChanged(); } else { containersBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public Builder addContainers( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder builderForValue) { if (containersBuilder_ == null) { ensureContainersIsMutable(); containers_.add(index, builderForValue.build()); onChanged(); } else { containersBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public Builder addAllContainers( java.lang.Iterable values) { if (containersBuilder_ == null) { ensureContainersIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, containers_); onChanged(); } else { containersBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public Builder clearContainers() { if (containersBuilder_ == null) { containers_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { containersBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public Builder removeContainers(int index) { if (containersBuilder_ == null) { ensureContainersIsMutable(); containers_.remove(index); onChanged(); } else { containersBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder 
getContainersBuilder( int index) { return getContainersFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder getContainersOrBuilder( int index) { if (containersBuilder_ == null) { return containers_.get(index); } else { return containersBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public java.util.List getContainersOrBuilderList() { if (containersBuilder_ != null) { return containersBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(containers_); } } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder addContainersBuilder() { return getContainersFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder addContainersBuilder( int index) { return getContainersFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerReportProto containers = 1; */ public java.util.List getContainersBuilderList() { return getContainersFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder> getContainersFieldBuilder() { if (containersBuilder_ == null) { containersBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto, 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder>( containers_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); containers_ = null; } return containersBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetContainersResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetContainersResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetContainersResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetContainersResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto 
getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface UseSharedCacheResourceRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.UseSharedCacheResourceRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ boolean hasApplicationId(); /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId(); /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder(); /** * optional string resourceKey = 2; */ boolean hasResourceKey(); /** * optional string resourceKey = 2; */ java.lang.String getResourceKey(); /** * optional string resourceKey = 2; */ org.apache.hadoop.thirdparty.protobuf.ByteString getResourceKeyBytes(); } /** * Protobuf type {@code hadoop.yarn.UseSharedCacheResourceRequestProto} */ public static final class UseSharedCacheResourceRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.UseSharedCacheResourceRequestProto) UseSharedCacheResourceRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use UseSharedCacheResourceRequestProto.newBuilder() to construct. 
private UseSharedCacheResourceRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private UseSharedCacheResourceRequestProto() { resourceKey_ = ""; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private UseSharedCacheResourceRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = applicationId_.toBuilder(); } applicationId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(applicationId_); applicationId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; resourceKey_ = bs; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); 
makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto.Builder.class); } private int bitField0_; public static final int APPLICATIONID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { return applicationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } public static final int RESOURCEKEY_FIELD_NUMBER = 2; private volatile java.lang.Object resourceKey_; /** * optional string resourceKey = 2; */ public boolean hasResourceKey() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string resourceKey = 2; */ public java.lang.String getResourceKey() { java.lang.Object ref = resourceKey_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { resourceKey_ = s; } return s; } } /** * optional string resourceKey = 2; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getResourceKeyBytes() { java.lang.Object ref = resourceKey_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceKey_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApplicationId()); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, resourceKey_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += 
org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getApplicationId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, resourceKey_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto) obj; if (hasApplicationId() != other.hasApplicationId()) return false; if (hasApplicationId()) { if (!getApplicationId() .equals(other.getApplicationId())) return false; } if (hasResourceKey() != other.hasResourceKey()) return false; if (hasResourceKey()) { if (!getResourceKey() .equals(other.getResourceKey())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationId()) { hash = (37 * hash) + APPLICATIONID_FIELD_NUMBER; hash = (53 * hash) + getApplicationId().hashCode(); } if (hasResourceKey()) { hash = (37 * hash) + RESOURCEKEY_FIELD_NUMBER; hash = (53 * hash) + getResourceKey().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom( 
java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.UseSharedCacheResourceRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.UseSharedCacheResourceRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationIdBuilder_ == null) { applicationId_ = null; } else { 
applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); resourceKey_ = ""; bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (applicationIdBuilder_ == null) { result.applicationId_ = applicationId_; } else { result.applicationId_ = applicationIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { to_bitField0_ |= 0x00000002; } result.resourceKey_ = resourceKey_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } 
@java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto.getDefaultInstance()) return this; if (other.hasApplicationId()) { mergeApplicationId(other.getApplicationId()); } if (other.hasResourceKey()) { bitField0_ |= 0x00000002; resourceKey_ = other.resourceKey_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto parsedMessage = null; try { 
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_; /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { if (applicationIdBuilder_ == null) { return applicationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } else { return applicationIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationId_ = value; onChanged(); } else { applicationIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder setApplicationId( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) { if (applicationIdBuilder_ == null) { applicationId_ = builderForValue.build(); onChanged(); } else { applicationIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationId_ != null && applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) { applicationId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.newBuilder(applicationId_).mergeFrom(value).buildPartial(); } else { applicationId_ = value; } onChanged(); } else { applicationIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder clearApplicationId() { if (applicationIdBuilder_ == null) { applicationId_ = null; onChanged(); } else { applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { if (applicationIdBuilder_ != null) { return applicationIdBuilder_.getMessageOrBuilder(); } else { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> getApplicationIdFieldBuilder() { if (applicationIdBuilder_ == null) { applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>( getApplicationId(), getParentForChildren(), isClean()); applicationId_ = null; } return applicationIdBuilder_; } private java.lang.Object resourceKey_ = ""; /** * optional string resourceKey = 2; */ public boolean hasResourceKey() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string resourceKey = 2; */ public java.lang.String getResourceKey() { java.lang.Object ref = resourceKey_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { resourceKey_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string 
resourceKey = 2; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getResourceKeyBytes() { java.lang.Object ref = resourceKey_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceKey_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string resourceKey = 2; */ public Builder setResourceKey( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; resourceKey_ = value; onChanged(); return this; } /** * optional string resourceKey = 2; */ public Builder clearResourceKey() { bitField0_ = (bitField0_ & ~0x00000002); resourceKey_ = getDefaultInstance().getResourceKey(); onChanged(); return this; } /** * optional string resourceKey = 2; */ public Builder setResourceKeyBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; resourceKey_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.UseSharedCacheResourceRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.UseSharedCacheResourceRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto(); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public UseSharedCacheResourceRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new UseSharedCacheResourceRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface UseSharedCacheResourceResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.UseSharedCacheResourceResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional string path = 1; */ boolean hasPath(); /** * optional string path = 1; */ java.lang.String getPath(); /** * optional string path = 1; */ org.apache.hadoop.thirdparty.protobuf.ByteString getPathBytes(); } /** * Protobuf type {@code hadoop.yarn.UseSharedCacheResourceResponseProto} */ public static final class UseSharedCacheResourceResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.UseSharedCacheResourceResponseProto) UseSharedCacheResourceResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use UseSharedCacheResourceResponseProto.newBuilder() to construct. 
private UseSharedCacheResourceResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private UseSharedCacheResourceResponseProto() { path_ = ""; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private UseSharedCacheResourceResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; path_ = bs; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.Builder.class); } private int bitField0_; public static final int PATH_FIELD_NUMBER = 1; private volatile java.lang.Object path_; /** * optional string path = 1; */ public boolean hasPath() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string path = 1; */ public java.lang.String getPath() { java.lang.Object ref = path_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { path_ = s; } return s; } } /** * optional string path = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getPathBytes() { java.lang.Object ref = path_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); path_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, path_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = 
memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, path_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto) obj; if (hasPath() != other.hasPath()) return false; if (hasPath()) { if (!getPath() .equals(other.getPath())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasPath()) { hash = (37 * hash) + PATH_FIELD_NUMBER; hash = (53 * hash) + getPath().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) 
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.UseSharedCacheResourceResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.UseSharedCacheResourceResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); path_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public 
org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.path_ = path_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.getDefaultInstance()) return this; if (other.hasPath()) { bitField0_ |= 0x00000001; path_ = other.path_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object path_ = ""; /** * 
optional string path = 1; */ public boolean hasPath() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string path = 1; */ public java.lang.String getPath() { java.lang.Object ref = path_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { path_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string path = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getPathBytes() { java.lang.Object ref = path_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); path_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string path = 1; */ public Builder setPath( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; path_ = value; onChanged(); return this; } /** * optional string path = 1; */ public Builder clearPath() { bitField0_ = (bitField0_ & ~0x00000001); path_ = getDefaultInstance().getPath(); onChanged(); return this; } /** * optional string path = 1; */ public Builder setPathBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; path_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.UseSharedCacheResourceResponseProto) } // 
@@protoc_insertion_point(class_scope:hadoop.yarn.UseSharedCacheResourceResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public UseSharedCacheResourceResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new UseSharedCacheResourceResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ReleaseSharedCacheResourceRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReleaseSharedCacheResourceRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ boolean hasApplicationId(); /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId(); /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder 
getApplicationIdOrBuilder(); /** * optional string resourceKey = 2; */ boolean hasResourceKey(); /** * optional string resourceKey = 2; */ java.lang.String getResourceKey(); /** * optional string resourceKey = 2; */ org.apache.hadoop.thirdparty.protobuf.ByteString getResourceKeyBytes(); } /** * Protobuf type {@code hadoop.yarn.ReleaseSharedCacheResourceRequestProto} */ public static final class ReleaseSharedCacheResourceRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ReleaseSharedCacheResourceRequestProto) ReleaseSharedCacheResourceRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ReleaseSharedCacheResourceRequestProto.newBuilder() to construct. private ReleaseSharedCacheResourceRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ReleaseSharedCacheResourceRequestProto() { resourceKey_ = ""; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ReleaseSharedCacheResourceRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = applicationId_.toBuilder(); } applicationId_ = 
input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(applicationId_); applicationId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; resourceKey_ = bs; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto.Builder.class); } private int bitField0_; public static final int APPLICATIONID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } public static final int RESOURCEKEY_FIELD_NUMBER = 2; private volatile java.lang.Object resourceKey_; /** * optional string resourceKey = 2; */ public boolean hasResourceKey() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string resourceKey = 2; */ public java.lang.String getResourceKey() { java.lang.Object ref = resourceKey_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { resourceKey_ = s; } return s; } } /** * optional string resourceKey = 2; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getResourceKeyBytes() { java.lang.Object ref = resourceKey_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceKey_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if 
(((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApplicationId()); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, resourceKey_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getApplicationId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, resourceKey_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto) obj; if (hasApplicationId() != other.hasApplicationId()) return false; if (hasApplicationId()) { if (!getApplicationId() .equals(other.getApplicationId())) return false; } if (hasResourceKey() != other.hasResourceKey()) return false; if (hasResourceKey()) { if (!getResourceKey() .equals(other.getResourceKey())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationId()) { hash = (37 * hash) + APPLICATIONID_FIELD_NUMBER; hash = (53 * hash) + getApplicationId().hashCode(); } if (hasResourceKey()) { hash = (37 * hash) + RESOURCEKEY_FIELD_NUMBER; hash = (53 * hash) + getResourceKey().hashCode(); } hash = 
(29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto 
parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return 
newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ReleaseSharedCacheResourceRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReleaseSharedCacheResourceRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (applicationIdBuilder_ == null) { applicationId_ = null; } else { applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); resourceKey_ = ""; bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (applicationIdBuilder_ == null) { result.applicationId_ = applicationId_; } else { result.applicationId_ = applicationIdBuilder_.build(); } 
to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { to_bitField0_ |= 0x00000002; } result.resourceKey_ = resourceKey_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto.getDefaultInstance()) return this; if (other.hasApplicationId()) { mergeApplicationId(other.getApplicationId()); } if (other.hasResourceKey()) { bitField0_ |= 0x00000002; resourceKey_ = other.resourceKey_; onChanged(); } 
this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_; /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { if (applicationIdBuilder_ == null) { return applicationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } else { return applicationIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationId_ = value; onChanged(); } else { applicationIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder setApplicationId( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) { if (applicationIdBuilder_ == null) { applicationId_ = builderForValue.build(); onChanged(); } else { applicationIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationId_ != null && applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) { applicationId_ = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.newBuilder(applicationId_).mergeFrom(value).buildPartial(); } else { applicationId_ = value; } onChanged(); } else { applicationIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder clearApplicationId() { if (applicationIdBuilder_ == null) { applicationId_ = null; onChanged(); } else { applicationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { if (applicationIdBuilder_ != null) { return applicationIdBuilder_.getMessageOrBuilder(); } else { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> getApplicationIdFieldBuilder() { if (applicationIdBuilder_ == null) { applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>( getApplicationId(), getParentForChildren(), isClean()); applicationId_ = null; } return applicationIdBuilder_; } private java.lang.Object resourceKey_ = ""; /** * optional string resourceKey = 2; */ public boolean hasResourceKey() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string resourceKey = 2; */ public java.lang.String getResourceKey() { java.lang.Object ref = resourceKey_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { resourceKey_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string 
resourceKey = 2; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getResourceKeyBytes() { java.lang.Object ref = resourceKey_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceKey_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string resourceKey = 2; */ public Builder setResourceKey( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; resourceKey_ = value; onChanged(); return this; } /** * optional string resourceKey = 2; */ public Builder clearResourceKey() { bitField0_ = (bitField0_ & ~0x00000002); resourceKey_ = getDefaultInstance().getResourceKey(); onChanged(); return this; } /** * optional string resourceKey = 2; */ public Builder setResourceKeyBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; resourceKey_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReleaseSharedCacheResourceRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ReleaseSharedCacheResourceRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto(); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ReleaseSharedCacheResourceRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new ReleaseSharedCacheResourceRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ReleaseSharedCacheResourceResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReleaseSharedCacheResourceResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.ReleaseSharedCacheResourceResponseProto} */ public static final class ReleaseSharedCacheResourceResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ReleaseSharedCacheResourceResponseProto) ReleaseSharedCacheResourceResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ReleaseSharedCacheResourceResponseProto.newBuilder() to construct. 
private ReleaseSharedCacheResourceResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ReleaseSharedCacheResourceResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ReleaseSharedCacheResourceResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom( java.nio.ByteBuffer data, 
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ReleaseSharedCacheResourceResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReleaseSharedCacheResourceResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public 
org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder 
addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return 
super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReleaseSharedCacheResourceResponseProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReleaseSharedCacheResourceResponseProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    // Deprecated static PARSER kept for backward compatibility; callers should
    // use parser() / getParserForType() below instead.
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER =
        new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
      @java.lang.Override
      public ReleaseSharedCacheResourceResponseProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        return new ReleaseSharedCacheResourceResponseProto(input, extensionRegistry);
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  // NOTE(review): everything in this region is protoc-generated (file header:
  // "DO NOT EDIT"). Do not hand-modify the logic; regenerate from
  // yarn_service_protos.proto instead. Comments below are review annotations only.

  public interface GetNewReservationRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetNewReservationRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetNewReservationRequestProto}
   *
   * NOTE(review): this message declares no fields; instances only carry
   * unknown fields read off the wire (see writeTo/getSerializedSize below).
   */
  public static final class GetNewReservationRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetNewReservationRequestProto)
      GetNewReservationRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetNewReservationRequestProto.newBuilder() to construct.
    private GetNewReservationRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) {
      super(builder);
    }
    private GetNewReservationRequestProto() {
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format constructor. Since this message declares no fields, every
    // non-zero tag falls through to parseUnknownField and is preserved as an
    // unknown field; tag 0 terminates the read loop.
    private GetNewReservationRequestProto(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(
                  input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        // Always retain whatever was read, even on a partial/failed parse.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto.Builder.class);
    }

    // -1 = not yet computed, 0 = not initialized, 1 = initialized (memoized).
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      // No required fields, so initialization is trivially true once checked.
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Only unknown fields are ever serialized for this empty message.
      unknownFields.writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto) obj;

      // Equality reduces to unknown-field equality: there are no declared fields.
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Standard generated parseFrom overloads: all delegate to PARSER or to the
    // GeneratedMessageV3 IOException-unwrapping helpers.
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetNewReservationRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetNewReservationRequestProto)
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No nested-message field builders to force-initialize for this message.
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merging another empty request only transfers its unknown fields.
      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          // Merge whatever was successfully parsed, even when rethrowing.
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetNewReservationRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetNewReservationRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    // Deprecated static PARSER kept for backward compatibility; use parser().
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER =
        new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
      @java.lang.Override
      public GetNewReservationRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        return new GetNewReservationRequestProto(input, extensionRegistry);
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface GetNewReservationResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetNewReservationResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
     */
    boolean hasReservationId();
    /**
     * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId();
    /**
     * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetNewReservationResponseProto}
   */
  public static final class GetNewReservationResponseProto
extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetNewReservationResponseProto) GetNewReservationResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetNewReservationResponseProto.newBuilder() to construct. private GetNewReservationResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetNewReservationResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetNewReservationResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = reservationId_.toBuilder(); } reservationId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(reservationId_); reservationId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto.Builder.class); } private int bitField0_; public static final int RESERVATION_ID_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_; /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 1; */ public boolean hasReservationId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() { return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() { return reservationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getReservationId()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getReservationId()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto) obj; if (hasReservationId() != other.hasReservationId()) return false; if (hasReservationId()) { if (!getReservationId() .equals(other.getReservationId())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasReservationId()) { hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER; hash = (53 * hash) + getReservationId().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public 
static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetNewReservationResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetNewReservationResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getReservationIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (reservationIdBuilder_ == null) { reservationId_ = null; } else { reservationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetNewReservationResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (reservationIdBuilder_ == null) { result.reservationId_ = reservationId_; } else { result.reservationId_ = reservationIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto.getDefaultInstance()) return this; if (other.hasReservationId()) { mergeReservationId(other.getReservationId()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> reservationIdBuilder_; /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 1; */ public boolean hasReservationId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() { if (reservationIdBuilder_ == null) { return reservationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } else { return reservationIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 1; */ public Builder setReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) { if (reservationIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } reservationId_ = value; onChanged(); } else { reservationIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 1; */ public Builder setReservationId( org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder builderForValue) { if (reservationIdBuilder_ == null) { reservationId_ = builderForValue.build(); onChanged(); } else { reservationIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 1; */ public Builder mergeReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) { if (reservationIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && reservationId_ != null && reservationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance()) { reservationId_ = org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.newBuilder(reservationId_).mergeFrom(value).buildPartial(); } else { reservationId_ = value; } onChanged(); } else { reservationIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 1; */ public Builder clearReservationId() { if (reservationIdBuilder_ == null) { reservationId_ = null; onChanged(); } else { reservationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 1; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder getReservationIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getReservationIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() { if (reservationIdBuilder_ != null) { return reservationIdBuilder_.getMessageOrBuilder(); } else { return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> getReservationIdFieldBuilder() { if (reservationIdBuilder_ == null) { reservationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>( getReservationId(), getParentForChildren(), isClean()); reservationId_ = null; } return reservationIdBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetNewReservationResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetNewReservationResponseProto) private static final 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetNewReservationResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetNewReservationResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ReservationSubmissionRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationSubmissionRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional string queue = 1; */ boolean hasQueue(); /** * optional string queue = 1; */ java.lang.String getQueue(); /** * optional string queue = 1; */ org.apache.hadoop.thirdparty.protobuf.ByteString getQueueBytes(); /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2; */ boolean hasReservationDefinition(); /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition(); /** * optional 
.hadoop.yarn.ReservationDefinitionProto reservation_definition = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder(); /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 3; */ boolean hasReservationId(); /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 3; */ org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId(); /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 3; */ org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.ReservationSubmissionRequestProto} */ public static final class ReservationSubmissionRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationSubmissionRequestProto) ReservationSubmissionRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ReservationSubmissionRequestProto.newBuilder() to construct. 
private ReservationSubmissionRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ReservationSubmissionRequestProto() { queue_ = ""; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ReservationSubmissionRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; queue_ = bs; break; } case 18: { org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder subBuilder = null; if (((bitField0_ & 0x00000002) != 0)) { subBuilder = reservationDefinition_.toBuilder(); } reservationDefinition_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(reservationDefinition_); reservationDefinition_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 26: { org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000004) != 0)) { subBuilder = reservationId_.toBuilder(); } reservationId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(reservationId_); reservationId_ = 
subBuilder.buildPartial(); } bitField0_ |= 0x00000004; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto.Builder.class); } private int bitField0_; public static final int QUEUE_FIELD_NUMBER = 1; private volatile java.lang.Object queue_; /** * optional string queue = 1; */ public boolean hasQueue() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string queue = 1; */ public java.lang.String getQueue() { java.lang.Object ref = queue_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { queue_ = s; } return s; } } /** * optional string queue = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getQueueBytes() { java.lang.Object ref = queue_; 
if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); queue_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int RESERVATION_DEFINITION_FIELD_NUMBER = 2; private org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto reservationDefinition_; /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2; */ public boolean hasReservationDefinition() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition() { return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_; } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder() { return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_; } public static final int RESERVATION_ID_FIELD_NUMBER = 3; private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_; /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 3; */ public boolean hasReservationId() { return ((bitField0_ & 0x00000004) != 0); } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() { return reservationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() { return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasReservationDefinition()) { if (!getReservationDefinition().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, queue_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getReservationDefinition()); } if (((bitField0_ & 0x00000004) != 0)) { output.writeMessage(3, getReservationId()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, queue_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getReservationDefinition()); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(3, getReservationId()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == 
this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto) obj; if (hasQueue() != other.hasQueue()) return false; if (hasQueue()) { if (!getQueue() .equals(other.getQueue())) return false; } if (hasReservationDefinition() != other.hasReservationDefinition()) return false; if (hasReservationDefinition()) { if (!getReservationDefinition() .equals(other.getReservationDefinition())) return false; } if (hasReservationId() != other.hasReservationId()) return false; if (hasReservationId()) { if (!getReservationId() .equals(other.getReservationId())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasQueue()) { hash = (37 * hash) + QUEUE_FIELD_NUMBER; hash = (53 * hash) + getQueue().hashCode(); } if (hasReservationDefinition()) { hash = (37 * hash) + RESERVATION_DEFINITION_FIELD_NUMBER; hash = (53 * hash) + getReservationDefinition().hashCode(); } if (hasReservationId()) { hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER; hash = (53 * hash) + getReservationId().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite 
extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ReservationSubmissionRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationSubmissionRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getReservationDefinitionFieldBuilder(); getReservationIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); queue_ = ""; bitField0_ = (bitField0_ & ~0x00000001); if 
(reservationDefinitionBuilder_ == null) { reservationDefinition_ = null; } else { reservationDefinitionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (reservationIdBuilder_ == null) { reservationId_ = null; } else { reservationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.queue_ = queue_; if (((from_bitField0_ & 0x00000002) != 0)) { if (reservationDefinitionBuilder_ == null) { result.reservationDefinition_ = reservationDefinition_; } else { result.reservationDefinition_ = reservationDefinitionBuilder_.build(); } to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { if (reservationIdBuilder_ == null) { result.reservationId_ = reservationId_; } else { 
result.reservationId_ = reservationIdBuilder_.build(); } to_bitField0_ |= 0x00000004; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto.getDefaultInstance()) return this; if (other.hasQueue()) { bitField0_ |= 0x00000001; queue_ = other.queue_; onChanged(); } if (other.hasReservationDefinition()) { mergeReservationDefinition(other.getReservationDefinition()); } if (other.hasReservationId()) { 
mergeReservationId(other.getReservationId()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasReservationDefinition()) { if (!getReservationDefinition().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object queue_ = ""; /** * optional string queue = 1; */ public boolean hasQueue() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string queue = 1; */ public java.lang.String getQueue() { java.lang.Object ref = queue_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { queue_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string queue = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getQueueBytes() { java.lang.Object ref = queue_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); queue_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional 
string queue = 1; */ public Builder setQueue( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; queue_ = value; onChanged(); return this; } /** * optional string queue = 1; */ public Builder clearQueue() { bitField0_ = (bitField0_ & ~0x00000001); queue_ = getDefaultInstance().getQueue(); onChanged(); return this; } /** * optional string queue = 1; */ public Builder setQueueBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; queue_ = value; onChanged(); return this; } private org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto reservationDefinition_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder> reservationDefinitionBuilder_; /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2; */ public boolean hasReservationDefinition() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition() { if (reservationDefinitionBuilder_ == null) { return reservationDefinition_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_; } else { return reservationDefinitionBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2; */ public Builder setReservationDefinition(org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto value) { if (reservationDefinitionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } reservationDefinition_ = value; onChanged(); } else { reservationDefinitionBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2; */ public Builder setReservationDefinition( org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder builderForValue) { if (reservationDefinitionBuilder_ == null) { reservationDefinition_ = builderForValue.build(); onChanged(); } else { reservationDefinitionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2; */ public Builder mergeReservationDefinition(org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto value) { if (reservationDefinitionBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && reservationDefinition_ != null && reservationDefinition_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance()) { reservationDefinition_ = org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.newBuilder(reservationDefinition_).mergeFrom(value).buildPartial(); } else { reservationDefinition_ = value; } onChanged(); } else { reservationDefinitionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2; */ public Builder clearReservationDefinition() { if (reservationDefinitionBuilder_ 
== null) { reservationDefinition_ = null; onChanged(); } else { reservationDefinitionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder getReservationDefinitionBuilder() { bitField0_ |= 0x00000002; onChanged(); return getReservationDefinitionFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder() { if (reservationDefinitionBuilder_ != null) { return reservationDefinitionBuilder_.getMessageOrBuilder(); } else { return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_; } } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 2; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder> getReservationDefinitionFieldBuilder() { if (reservationDefinitionBuilder_ == null) { reservationDefinitionBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder>( getReservationDefinition(), getParentForChildren(), isClean()); reservationDefinition_ = null; } return reservationDefinitionBuilder_; } private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_; private 
org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> reservationIdBuilder_; /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 3; */ public boolean hasReservationId() { return ((bitField0_ & 0x00000004) != 0); } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() { if (reservationIdBuilder_ == null) { return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } else { return reservationIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 3; */ public Builder setReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) { if (reservationIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } reservationId_ = value; onChanged(); } else { reservationIdBuilder_.setMessage(value); } bitField0_ |= 0x00000004; return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 3; */ public Builder setReservationId( org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder builderForValue) { if (reservationIdBuilder_ == null) { reservationId_ = builderForValue.build(); onChanged(); } else { reservationIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 3; */ public Builder mergeReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) { if (reservationIdBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && reservationId_ != null && reservationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance()) { reservationId_ = 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.newBuilder(reservationId_).mergeFrom(value).buildPartial(); } else { reservationId_ = value; } onChanged(); } else { reservationIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000004; return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 3; */ public Builder clearReservationId() { if (reservationIdBuilder_ == null) { reservationId_ = null; onChanged(); } else { reservationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder getReservationIdBuilder() { bitField0_ |= 0x00000004; onChanged(); return getReservationIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 3; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() { if (reservationIdBuilder_ != null) { return reservationIdBuilder_.getMessageOrBuilder(); } else { return reservationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 3; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> getReservationIdFieldBuilder() { if (reservationIdBuilder_ == null) { reservationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>( getReservationId(), getParentForChildren(), isClean()); reservationId_ = null; } return reservationIdBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationSubmissionRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationSubmissionRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { 
@java.lang.Override public ReservationSubmissionRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new ReservationSubmissionRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ReservationSubmissionResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationSubmissionResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.ReservationSubmissionResponseProto} */ public static final class ReservationSubmissionResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationSubmissionResponseProto) ReservationSubmissionResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ReservationSubmissionResponseProto.newBuilder() to construct. 
private ReservationSubmissionResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ReservationSubmissionResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ReservationSubmissionResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite 
extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ReservationSubmissionResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationSubmissionResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor 
getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationSubmissionResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object 
value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationSubmissionResponseProto) } // 
@@protoc_insertion_point(class_scope:hadoop.yarn.ReservationSubmissionResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ReservationSubmissionResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new ReservationSubmissionResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ReservationUpdateRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationUpdateRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ boolean hasReservationDefinition(); /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition(); /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder(); /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 2; */ boolean hasReservationId(); /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId(); /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.ReservationUpdateRequestProto} */ public static final class ReservationUpdateRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationUpdateRequestProto) ReservationUpdateRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ReservationUpdateRequestProto.newBuilder() to construct. private ReservationUpdateRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ReservationUpdateRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ReservationUpdateRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = reservationDefinition_.toBuilder(); } reservationDefinition_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(reservationDefinition_); reservationDefinition_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000002) != 0)) { subBuilder = reservationId_.toBuilder(); } reservationId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(reservationId_); reservationId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto.Builder.class); } private int bitField0_; public static final int RESERVATION_DEFINITION_FIELD_NUMBER = 1; private org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto reservationDefinition_; /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ public boolean hasReservationDefinition() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition() { return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_; } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder() { return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_; } public static final int RESERVATION_ID_FIELD_NUMBER = 2; private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_; /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 2; */ public boolean hasReservationId() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() { return reservationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() { return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasReservationDefinition()) { if (!getReservationDefinition().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getReservationDefinition()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getReservationId()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getReservationDefinition()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getReservationId()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto other = 
(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto) obj; if (hasReservationDefinition() != other.hasReservationDefinition()) return false; if (hasReservationDefinition()) { if (!getReservationDefinition() .equals(other.getReservationDefinition())) return false; } if (hasReservationId() != other.hasReservationId()) return false; if (hasReservationId()) { if (!getReservationId() .equals(other.getReservationId())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasReservationDefinition()) { hash = (37 * hash) + RESERVATION_DEFINITION_FIELD_NUMBER; hash = (53 * hash) + getReservationDefinition().hashCode(); } if (hasReservationId()) { hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER; hash = (53 * hash) + getReservationId().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ReservationUpdateRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationUpdateRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getReservationDefinitionFieldBuilder(); getReservationIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (reservationDefinitionBuilder_ == null) { reservationDefinition_ = null; } else { 
reservationDefinitionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (reservationIdBuilder_ == null) { reservationId_ = null; } else { reservationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (reservationDefinitionBuilder_ == null) { result.reservationDefinition_ = reservationDefinition_; } else { result.reservationDefinition_ = reservationDefinitionBuilder_.build(); } to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { if (reservationIdBuilder_ == null) { result.reservationId_ = reservationId_; } else { result.reservationId_ = reservationIdBuilder_.build(); } to_bitField0_ |= 0x00000002; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } 
@java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto.getDefaultInstance()) return this; if (other.hasReservationDefinition()) { mergeReservationDefinition(other.getReservationDefinition()); } if (other.hasReservationId()) { mergeReservationId(other.getReservationId()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasReservationDefinition()) { if (!getReservationDefinition().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( 
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto reservationDefinition_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder> reservationDefinitionBuilder_; /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ public boolean hasReservationDefinition() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition() { if (reservationDefinitionBuilder_ == null) { return reservationDefinition_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_; } else { return reservationDefinitionBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ public Builder setReservationDefinition(org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto value) { if (reservationDefinitionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } reservationDefinition_ = value; onChanged(); } else { reservationDefinitionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ public Builder setReservationDefinition( org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder builderForValue) { if (reservationDefinitionBuilder_ == null) { reservationDefinition_ = builderForValue.build(); onChanged(); } else { reservationDefinitionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ public Builder mergeReservationDefinition(org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto value) { if (reservationDefinitionBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && reservationDefinition_ != null && reservationDefinition_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance()) { reservationDefinition_ = org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.newBuilder(reservationDefinition_).mergeFrom(value).buildPartial(); } else { reservationDefinition_ = value; } onChanged(); } else { reservationDefinitionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ public Builder clearReservationDefinition() { if (reservationDefinitionBuilder_ 
== null) { reservationDefinition_ = null; onChanged(); } else { reservationDefinitionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder getReservationDefinitionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getReservationDefinitionFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder() { if (reservationDefinitionBuilder_ != null) { return reservationDefinitionBuilder_.getMessageOrBuilder(); } else { return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_; } } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder> getReservationDefinitionFieldBuilder() { if (reservationDefinitionBuilder_ == null) { reservationDefinitionBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder>( getReservationDefinition(), getParentForChildren(), isClean()); reservationDefinition_ = null; } return reservationDefinitionBuilder_; } private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_; private 
org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> reservationIdBuilder_; /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 2; */ public boolean hasReservationId() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() { if (reservationIdBuilder_ == null) { return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } else { return reservationIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 2; */ public Builder setReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) { if (reservationIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } reservationId_ = value; onChanged(); } else { reservationIdBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 2; */ public Builder setReservationId( org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder builderForValue) { if (reservationIdBuilder_ == null) { reservationId_ = builderForValue.build(); onChanged(); } else { reservationIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 2; */ public Builder mergeReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) { if (reservationIdBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && reservationId_ != null && reservationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance()) { reservationId_ = 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.newBuilder(reservationId_).mergeFrom(value).buildPartial(); } else { reservationId_ = value; } onChanged(); } else { reservationIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 2; */ public Builder clearReservationId() { if (reservationIdBuilder_ == null) { reservationId_ = null; onChanged(); } else { reservationIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder getReservationIdBuilder() { bitField0_ |= 0x00000002; onChanged(); return getReservationIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 2; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() { if (reservationIdBuilder_ != null) { return reservationIdBuilder_.getMessageOrBuilder(); } else { return reservationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 2; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> getReservationIdFieldBuilder() { if (reservationIdBuilder_ == null) { reservationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>( getReservationId(), getParentForChildren(), isClean()); reservationId_ = null; } return reservationIdBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationUpdateRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationUpdateRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override 
public ReservationUpdateRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new ReservationUpdateRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ReservationUpdateResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationUpdateResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.ReservationUpdateResponseProto} */ public static final class ReservationUpdateResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationUpdateResponseProto) ReservationUpdateResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ReservationUpdateResponseProto.newBuilder() to construct. 
private ReservationUpdateResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ReservationUpdateResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ReservationUpdateResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ReservationUpdateResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationUpdateResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationUpdateResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } 
@java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationUpdateResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationUpdateResponseProto) 
// NOTE(review): protoc-generated code (file header says "DO NOT EDIT") — do not
// hand-modify; regenerate from yarn_service_protos.proto instead. Comments below
// were added for review navigation only; all code tokens are unchanged, with line
// breaks restored (the scraped source had collapsed them, which would make the
// inline // insertion-point comments swallow the code that follows them).
// NOTE(review): several generic type arguments (e.g. Parser<ReservationDeleteRequestProto>,
// GeneratedMessageV3.Builder<Builder>) appear to have been stripped by HTML
// extraction, leaving raw types — confirm against the original generated source.

// ---- Tail of ReservationUpdateResponseProto (class opened before this chunk):
// lazily-shared singleton default instance plus the message's PARSER plumbing.
private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto();
}

public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
  @java.lang.Override
  public ReservationUpdateResponseProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return new ReservationUpdateResponseProto(input, extensionRegistry);
  }
};

public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}
// ---- end of ReservationUpdateResponseProto ----

// Read-only accessor view shared by ReservationDeleteRequestProto and its Builder.
public interface ReservationDeleteRequestProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationDeleteRequestProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /**
   * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
   */
  boolean hasReservationId();
  /**
   * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId();
  /**
   * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder();
}
/**
 * Protobuf type {@code hadoop.yarn.ReservationDeleteRequestProto}
 */
public static final class ReservationDeleteRequestProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationDeleteRequestProto)
    ReservationDeleteRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ReservationDeleteRequestProto.newBuilder() to construct.
  private ReservationDeleteRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) {
    super(builder);
  }
  private ReservationDeleteRequestProto() {
  }

  @java.lang.Override
  public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: consumes tag/value pairs until EOF (tag 0),
  // preserving fields it does not recognize in unknownFields.
  private ReservationDeleteRequestProto(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
        org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: {
            // Field 1 (reservation_id), wire type 2 (length-delimited): if a value
            // was already parsed, merge into it; then mark presence in bitField0_.
            org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder subBuilder = null;
            if (((bitField0_ & 0x00000001) != 0)) {
              subBuilder = reservationId_.toBuilder();
            }
            reservationId_ = input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.PARSER, extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(reservationId_);
              reservationId_ = subBuilder.buildPartial();
            }
            bitField0_ |= 0x00000001;
            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      // Wrap plain I/O failures so callers see a protobuf parse failure with the
      // partially-built message attached.
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteRequestProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteRequestProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto.Builder.class);
  }

  // bit 0 of bitField0_ tracks presence of the optional reservation_id field.
  private int bitField0_;
  public static final int RESERVATION_ID_FIELD_NUMBER = 1;
  private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
  /**
   * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
   */
  public boolean hasReservationId() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
   */
  public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
    return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
  }
  /**
   * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
   */
  public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
    return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
  }

  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getReservationId());
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeMessageSize(1, getReservationId());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto)) {
      return super.equals(obj);
    }
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto) obj;

    if (hasReservationId() != other.hasReservationId()) return false;
    if (hasReservationId()) {
      if (!getReservationId()
          .equals(other.getReservationId())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasReservationId()) {
      hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER;
      hash = (53 * hash) + getReservationId().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // ---- Standard protoc parseFrom overloads; all delegate to PARSER.
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(
      java.nio.ByteBuffer data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(
      java.nio.ByteBuffer data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(byte[] data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(
      byte[] data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hadoop.yarn.ReservationDeleteRequestProto}
   */
  public static final class Builder extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
      // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationDeleteRequestProto)
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProtoOrBuilder {
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto.Builder.class);
    }

    // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
        getReservationIdFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      if (reservationIdBuilder_ == null) {
        reservationId_ = null;
      } else {
        reservationIdBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteRequestProto_descriptor;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto getDefaultInstanceForType() {
      return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto.getDefaultInstance();
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto build() {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto buildPartial() {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      // Copy reservation_id (and its presence bit) into the immutable message.
      if (((from_bitField0_ & 0x00000001) != 0)) {
        if (reservationIdBuilder_ == null) {
          result.reservationId_ = reservationId_;
        } else {
          result.reservationId_ = reservationIdBuilder_.build();
        }
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto) {
        return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto other) {
      if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto.getDefaultInstance()) return this;
      if (other.hasReservationId()) {
        mergeReservationId(other.getReservationId());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever parsed before the failure so partial input is not lost.
        parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    // Field value plus optional nested builder; exactly one of the two is live.
    private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
    private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> reservationIdBuilder_;
    /**
     * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
     */
    public boolean hasReservationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
      if (reservationIdBuilder_ == null) {
        return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
      } else {
        return reservationIdBuilder_.getMessage();
      }
    }
    /**
     * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
     */
    public Builder setReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
      if (reservationIdBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        reservationId_ = value;
        onChanged();
      } else {
        reservationIdBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
     */
    public Builder setReservationId(
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder builderForValue) {
      if (reservationIdBuilder_ == null) {
        reservationId_ = builderForValue.build();
        onChanged();
      } else {
        reservationIdBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
     */
    public Builder mergeReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
      if (reservationIdBuilder_ == null) {
        // Merge with the existing value only if one is actually present;
        // otherwise adopt the incoming value wholesale.
        if (((bitField0_ & 0x00000001) != 0) &&
            reservationId_ != null &&
            reservationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance()) {
          reservationId_ =
            org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.newBuilder(reservationId_).mergeFrom(value).buildPartial();
        } else {
          reservationId_ = value;
        }
        onChanged();
      } else {
        reservationIdBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
     */
    public Builder clearReservationId() {
      if (reservationIdBuilder_ == null) {
        reservationId_ = null;
        onChanged();
      } else {
        reservationIdBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    /**
     * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder getReservationIdBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getReservationIdFieldBuilder().getBuilder();
    }
    /**
     * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
     */
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
      if (reservationIdBuilder_ != null) {
        return reservationIdBuilder_.getMessageOrBuilder();
      } else {
        return reservationId_ == null ?
            org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
      }
    }
    /**
     * optional .hadoop.yarn.ReservationIdProto reservation_id = 1;
     */
    private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>
        getReservationIdFieldBuilder() {
      // Lazily creates the nested-field builder; ownership of the current value
      // transfers to the builder (reservationId_ is nulled out afterwards).
      if (reservationIdBuilder_ == null) {
        reservationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
            org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>(
                getReservationId(),
                getParentForChildren(),
                isClean());
        reservationId_ = null;
      }
      return reservationIdBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationDeleteRequestProto)
  }

  // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationDeleteRequestProto)
  private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto();
  }

  public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
      PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
    @java.lang.Override
    public ReservationDeleteRequestProto parsePartialFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return new ReservationDeleteRequestProto(input, extensionRegistry);
    }
  };

  public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
// ---- end of ReservationDeleteRequestProto ----

// Empty-message accessor view (ReservationDeleteResponseProto carries no fields).
public interface ReservationDeleteResponseProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationDeleteResponseProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
 * Protobuf type {@code hadoop.yarn.ReservationDeleteResponseProto}
 */
public static final class ReservationDeleteResponseProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationDeleteResponseProto)
    ReservationDeleteResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ReservationDeleteResponseProto.newBuilder() to construct.
private ReservationDeleteResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ReservationDeleteResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ReservationDeleteResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
// Static parse entry points for ReservationDeleteResponseProto. All overloads
// delegate to PARSER; the stream variants go through GeneratedMessageV3 so
// protobuf IO errors surface as plain java.io.IOException.
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.ByteString data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.ByteString data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom(byte[] data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom(
    byte[] data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseFrom(
    java.io.InputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parseDelimitedFrom(
    java.io.InputStream input,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}

@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // Avoid a useless mergeFrom when converting the shared default instance.
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Builder for the (field-less) message {@code hadoop.yarn.ReservationDeleteResponseProto}.
 *
 * Protobuf type {@code hadoop.yarn.ReservationDeleteResponseProto}
 */
public static final class Builder extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationDeleteResponseProto)
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProtoOrBuilder {
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteResponseProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteResponseProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto.class,
            org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto.Builder.class);
  }

  // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    // No field builders exist for this empty message; body intentionally blank.
    if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
    }
  }
  @java.lang.Override
  public Builder clear() {
    super.clear();
    return this;
  }

  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationDeleteResponseProto_descriptor;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto getDefaultInstanceForType() {
    return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto.getDefaultInstance();
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto build() {
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto buildPartial() {
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto result =
        new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto(this);
    onBuilt();
    return result;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
    if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto) {
      return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto other) {
    if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto.getDefaultInstance()) return this;
    // No declared fields to merge; only unknown fields are carried over.
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  @java.lang.Override
  public Builder mergeFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      // Keep whatever was parsed before the failure, then rethrow as IOException.
      parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  @java.lang.Override
  public final Builder setUnknownFields(
      final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }


  // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationDeleteResponseProto)
}

// @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationDeleteResponseProto)
private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ReservationDeleteResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new ReservationDeleteResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ReservationListRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationListRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional string queue = 1; */ boolean hasQueue(); /** * optional string queue = 1; */ java.lang.String getQueue(); /** * optional string queue = 1; */ org.apache.hadoop.thirdparty.protobuf.ByteString getQueueBytes(); /** * optional string reservation_id = 3; */ boolean hasReservationId(); /** * optional string reservation_id = 3; */ java.lang.String getReservationId(); /** * optional string reservation_id = 3; */ org.apache.hadoop.thirdparty.protobuf.ByteString getReservationIdBytes(); /** * optional int64 start_time = 
4; */ boolean hasStartTime(); /** * optional int64 start_time = 4; */ long getStartTime(); /** * optional int64 end_time = 5; */ boolean hasEndTime(); /** * optional int64 end_time = 5; */ long getEndTime(); /** * optional bool include_resource_allocations = 6; */ boolean hasIncludeResourceAllocations(); /** * optional bool include_resource_allocations = 6; */ boolean getIncludeResourceAllocations(); } /** * Protobuf type {@code hadoop.yarn.ReservationListRequestProto} */ public static final class ReservationListRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationListRequestProto) ReservationListRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ReservationListRequestProto.newBuilder() to construct. private ReservationListRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ReservationListRequestProto() { queue_ = ""; reservationId_ = ""; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ReservationListRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; queue_ = bs; break; } case 26: { 
org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; reservationId_ = bs; break; } case 32: { bitField0_ |= 0x00000004; startTime_ = input.readInt64(); break; } case 40: { bitField0_ |= 0x00000008; endTime_ = input.readInt64(); break; } case 48: { bitField0_ |= 0x00000010; includeResourceAllocations_ = input.readBool(); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto.Builder.class); } private int bitField0_; public static final int QUEUE_FIELD_NUMBER = 1; private volatile java.lang.Object queue_; /** * optional string queue = 1; */ public boolean hasQueue() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string queue = 1; */ public java.lang.String getQueue() { java.lang.Object ref = queue_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { queue_ = s; } return s; } } /** * optional string queue = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getQueueBytes() { java.lang.Object ref = queue_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); queue_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int RESERVATION_ID_FIELD_NUMBER = 3; private volatile java.lang.Object reservationId_; /** * optional string reservation_id = 3; */ public boolean hasReservationId() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string reservation_id = 3; */ public java.lang.String getReservationId() { java.lang.Object ref = reservationId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { reservationId_ = s; } return s; } } /** * optional string reservation_id = 3; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getReservationIdBytes() { java.lang.Object ref = reservationId_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); reservationId_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int START_TIME_FIELD_NUMBER = 4; private long startTime_; /** * optional int64 start_time = 4; */ public boolean hasStartTime() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int64 start_time = 4; */ public long getStartTime() { return startTime_; } public static final int END_TIME_FIELD_NUMBER = 
5; private long endTime_; /** * optional int64 end_time = 5; */ public boolean hasEndTime() { return ((bitField0_ & 0x00000008) != 0); } /** * optional int64 end_time = 5; */ public long getEndTime() { return endTime_; } public static final int INCLUDE_RESOURCE_ALLOCATIONS_FIELD_NUMBER = 6; private boolean includeResourceAllocations_; /** * optional bool include_resource_allocations = 6; */ public boolean hasIncludeResourceAllocations() { return ((bitField0_ & 0x00000010) != 0); } /** * optional bool include_resource_allocations = 6; */ public boolean getIncludeResourceAllocations() { return includeResourceAllocations_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, queue_); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, reservationId_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeInt64(4, startTime_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeInt64(5, endTime_); } if (((bitField0_ & 0x00000010) != 0)) { output.writeBool(6, includeResourceAllocations_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, queue_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, reservationId_); } if (((bitField0_ & 0x00000004) 
!= 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(4, startTime_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(5, endTime_); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(6, includeResourceAllocations_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto) obj; if (hasQueue() != other.hasQueue()) return false; if (hasQueue()) { if (!getQueue() .equals(other.getQueue())) return false; } if (hasReservationId() != other.hasReservationId()) return false; if (hasReservationId()) { if (!getReservationId() .equals(other.getReservationId())) return false; } if (hasStartTime() != other.hasStartTime()) return false; if (hasStartTime()) { if (getStartTime() != other.getStartTime()) return false; } if (hasEndTime() != other.hasEndTime()) return false; if (hasEndTime()) { if (getEndTime() != other.getEndTime()) return false; } if (hasIncludeResourceAllocations() != other.hasIncludeResourceAllocations()) return false; if (hasIncludeResourceAllocations()) { if (getIncludeResourceAllocations() != other.getIncludeResourceAllocations()) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasQueue()) { hash = (37 * hash) + QUEUE_FIELD_NUMBER; hash = (53 * hash) + 
getQueue().hashCode(); } if (hasReservationId()) { hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER; hash = (53 * hash) + getReservationId().hashCode(); } if (hasStartTime()) { hash = (37 * hash) + START_TIME_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getStartTime()); } if (hasEndTime()) { hash = (37 * hash) + END_TIME_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getEndTime()); } if (hasIncludeResourceAllocations()) { hash = (37 * hash) + INCLUDE_RESOURCE_ALLOCATIONS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getIncludeResourceAllocations()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public 
static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { 
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ReservationListRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationListRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); queue_ = ""; bitField0_ = (bitField0_ & ~0x00000001); reservationId_ = ""; bitField0_ = (bitField0_ & ~0x00000002); startTime_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); endTime_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); includeResourceAllocations_ = false; bitField0_ = (bitField0_ & ~0x00000010); return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.queue_ = queue_; if (((from_bitField0_ & 0x00000002) != 0)) { to_bitField0_ |= 0x00000002; } result.reservationId_ = reservationId_; if (((from_bitField0_ & 0x00000004) != 0)) { result.startTime_ = startTime_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.endTime_ = endTime_; to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.includeResourceAllocations_ = includeResourceAllocations_; to_bitField0_ |= 0x00000010; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto.getDefaultInstance()) return this; if (other.hasQueue()) { bitField0_ |= 0x00000001; queue_ = other.queue_; onChanged(); } if (other.hasReservationId()) { bitField0_ |= 0x00000002; reservationId_ = other.reservationId_; onChanged(); } if (other.hasStartTime()) { setStartTime(other.getStartTime()); } if (other.hasEndTime()) { setEndTime(other.getEndTime()); } if (other.hasIncludeResourceAllocations()) { setIncludeResourceAllocations(other.getIncludeResourceAllocations()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object queue_ = ""; /** * optional string queue = 1; */ public boolean hasQueue() { return ((bitField0_ & 
0x00000001) != 0); }
      // NOTE(review): generated by protoc from yarn_service_protos.proto — do not
      // hand-edit logic; regenerate from the .proto. Comments restored/expanded only.
      /**
       * <code>optional string queue = 1;</code>
       * Plan/queue whose reservations are being listed.
       * @return the queue name, decoded lazily from UTF-8 bytes and cached.
       */
      public java.lang.String getQueue() {
        java.lang.Object ref = queue_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          // Cache the decoded String only when the bytes are valid UTF-8.
          if (bs.isValidUtf8()) {
            queue_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string queue = 1;</code>
       * @return the queue name as a UTF-8 {@code ByteString}, encoded lazily and cached.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getQueueBytes() {
        java.lang.Object ref = queue_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b =
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          queue_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string queue = 1;</code>
       */
      public Builder setQueue(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        queue_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string queue = 1;</code>
       */
      public Builder clearQueue() {
        bitField0_ = (bitField0_ & ~0x00000001);
        queue_ = getDefaultInstance().getQueue();
        onChanged();
        return this;
      }
      /**
       * <code>optional string queue = 1;</code>
       */
      public Builder setQueueBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        queue_ = value;
        onChanged();
        return this;
      }

      // Holds either a String or a ByteString; converted lazily in the getters above.
      private java.lang.Object reservationId_ = "";
      /**
       * <code>optional string reservation_id = 3;</code>
       */
      public boolean hasReservationId() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string reservation_id = 3;</code>
       */
      public java.lang.String getReservationId() {
        java.lang.Object ref = reservationId_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            reservationId_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string reservation_id = 3;</code>
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getReservationIdBytes() {
        java.lang.Object ref = reservationId_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b =
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          reservationId_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string reservation_id = 3;</code>
       */
      public Builder setReservationId(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        reservationId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string reservation_id = 3;</code>
       */
      public Builder clearReservationId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        reservationId_ = getDefaultInstance().getReservationId();
        onChanged();
        return this;
      }
      /**
       * <code>optional string reservation_id = 3;</code>
       */
      public Builder setReservationIdBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        reservationId_ = value;
        onChanged();
        return this;
      }

      private long startTime_ ;
      /**
       * <code>optional int64 start_time = 4;</code>
       */
      public boolean hasStartTime() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional int64 start_time = 4;</code>
       */
      public long getStartTime() {
        return startTime_;
      }
      /**
       * <code>optional int64 start_time = 4;</code>
       */
      public Builder setStartTime(long value) {
        bitField0_ |= 0x00000004;
        startTime_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 start_time = 4;</code>
       */
      public Builder clearStartTime() {
        bitField0_ = (bitField0_ & ~0x00000004);
        startTime_ = 0L;
        onChanged();
        return this;
      }

      private long endTime_ ;
      /**
       * <code>optional int64 end_time = 5;</code>
       */
      public boolean hasEndTime() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional int64 end_time = 5;</code>
       */
      public long getEndTime() {
        return endTime_;
      }
      /**
       * <code>optional int64 end_time = 5;</code>
       */
      public Builder setEndTime(long value) {
        bitField0_ |= 0x00000008;
        endTime_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 end_time = 5;</code>
       */
      public Builder clearEndTime() {
        bitField0_ = (bitField0_ & ~0x00000008);
        endTime_ = 0L;
        onChanged();
        return this;
      }

      private boolean includeResourceAllocations_ ;
      /**
       * <code>optional bool include_resource_allocations = 6;</code>
       */
      public boolean hasIncludeResourceAllocations() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional bool include_resource_allocations = 6;</code>
       */
      public boolean getIncludeResourceAllocations() {
        return includeResourceAllocations_;
      }
      /**
       * <code>optional bool include_resource_allocations = 6;</code>
       */
      public Builder setIncludeResourceAllocations(boolean value) {
        bitField0_ |= 0x00000010;
        includeResourceAllocations_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool include_resource_allocations = 6;</code>
       */
      public Builder clearIncludeResourceAllocations() {
        bitField0_ = (bitField0_ & ~0x00000010);
        includeResourceAllocations_ = false;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationListRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationListRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static
final org.apache.hadoop.thirdparty.protobuf.Parser<ReservationListRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReservationListRequestProto>() {
      @java.lang.Override
      public ReservationListRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        return new ReservationListRequestProto(input, extensionRegistry);
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReservationListRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReservationListRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  /**
   * Read-only view of {@code hadoop.yarn.ReservationListResponseProto}: the list of
   * reservation allocations returned for a ReservationList RPC.
   */
  public interface ReservationListResponseProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationListResponseProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto> 
        getReservationsList();
    /**
     * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto getReservations(int index);
    /**
     * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
     */
    int getReservationsCount();
    /**
     * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder> 
        getReservationsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder getReservationsOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.ReservationListResponseProto}
   */
  public static final class ReservationListResponseProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
@@protoc_insertion_point(message_implements:hadoop.yarn.ReservationListResponseProto) ReservationListResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ReservationListResponseProto.newBuilder() to construct. private ReservationListResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ReservationListResponseProto() { reservations_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ReservationListResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { reservations_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } reservations_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { reservations_ = 
java.util.Collections.unmodifiableList(reservations_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto.Builder.class); } public static final int RESERVATIONS_FIELD_NUMBER = 1; private java.util.List reservations_; /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public java.util.List getReservationsList() { return reservations_; } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public java.util.List getReservationsOrBuilderList() { return reservations_; } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public int getReservationsCount() { return reservations_.size(); } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto getReservations(int index) { return reservations_.get(index); } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder getReservationsOrBuilder( int index) { return reservations_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; 
if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getReservationsCount(); i++) { if (!getReservations(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < reservations_.size(); i++) { output.writeMessage(1, reservations_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < reservations_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, reservations_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto) obj; if (!getReservationsList() .equals(other.getReservationsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getReservationsCount() > 0) { hash = (37 * hash) + RESERVATIONS_FIELD_NUMBER; hash = (53 * hash) + getReservationsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom( java.nio.ByteBuffer data) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom( java.io.InputStream 
input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == 
DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ReservationListResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationListResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getReservationsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (reservationsBuilder_ == null) { reservations_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & 
~0x00000001); } else { reservationsBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ReservationListResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto(this); int from_bitField0_ = bitField0_; if (reservationsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { reservations_ = java.util.Collections.unmodifiableList(reservations_); bitField0_ = (bitField0_ & ~0x00000001); } result.reservations_ = reservations_; } else { result.reservations_ = reservationsBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto.getDefaultInstance()) return this; if (reservationsBuilder_ == null) { if (!other.reservations_.isEmpty()) { if (reservations_.isEmpty()) { reservations_ = other.reservations_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureReservationsIsMutable(); reservations_.addAll(other.reservations_); } onChanged(); } } else { if (!other.reservations_.isEmpty()) { if (reservationsBuilder_.isEmpty()) { reservationsBuilder_.dispose(); reservationsBuilder_ = null; reservations_ = other.reservations_; bitField0_ = (bitField0_ & ~0x00000001); reservationsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getReservationsFieldBuilder() : null; } else { reservationsBuilder_.addAllMessages(other.reservations_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getReservationsCount(); i++) { if (!getReservations(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List reservations_ = java.util.Collections.emptyList(); private void ensureReservationsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { reservations_ = new java.util.ArrayList(reservations_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder> reservationsBuilder_; /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public java.util.List getReservationsList() { if (reservationsBuilder_ == null) { return java.util.Collections.unmodifiableList(reservations_); } else { return reservationsBuilder_.getMessageList(); } } /** * repeated 
.hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public int getReservationsCount() { if (reservationsBuilder_ == null) { return reservations_.size(); } else { return reservationsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto getReservations(int index) { if (reservationsBuilder_ == null) { return reservations_.get(index); } else { return reservationsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public Builder setReservations( int index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto value) { if (reservationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReservationsIsMutable(); reservations_.set(index, value); onChanged(); } else { reservationsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public Builder setReservations( int index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder builderForValue) { if (reservationsBuilder_ == null) { ensureReservationsIsMutable(); reservations_.set(index, builderForValue.build()); onChanged(); } else { reservationsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public Builder addReservations(org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto value) { if (reservationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReservationsIsMutable(); reservations_.add(value); onChanged(); } else { reservationsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public Builder addReservations( int index, 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto value) { if (reservationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReservationsIsMutable(); reservations_.add(index, value); onChanged(); } else { reservationsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public Builder addReservations( org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder builderForValue) { if (reservationsBuilder_ == null) { ensureReservationsIsMutable(); reservations_.add(builderForValue.build()); onChanged(); } else { reservationsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public Builder addReservations( int index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder builderForValue) { if (reservationsBuilder_ == null) { ensureReservationsIsMutable(); reservations_.add(index, builderForValue.build()); onChanged(); } else { reservationsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public Builder addAllReservations( java.lang.Iterable values) { if (reservationsBuilder_ == null) { ensureReservationsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, reservations_); onChanged(); } else { reservationsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public Builder clearReservations() { if (reservationsBuilder_ == null) { reservations_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { reservationsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public Builder 
removeReservations(int index) { if (reservationsBuilder_ == null) { ensureReservationsIsMutable(); reservations_.remove(index); onChanged(); } else { reservationsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder getReservationsBuilder( int index) { return getReservationsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder getReservationsOrBuilder( int index) { if (reservationsBuilder_ == null) { return reservations_.get(index); } else { return reservationsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public java.util.List getReservationsOrBuilderList() { if (reservationsBuilder_ != null) { return reservationsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(reservations_); } } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder addReservationsBuilder() { return getReservationsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder addReservationsBuilder( int index) { return getReservationsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ReservationAllocationStateProto reservations = 1; */ public java.util.List getReservationsBuilderList() { return 
getReservationsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder> getReservationsFieldBuilder() { if (reservationsBuilder_ == null) { reservationsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder>( reservations_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); reservations_ = null; } return reservationsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationListResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationListResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public 
ReservationListResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new ReservationListResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface RunSharedCacheCleanerTaskRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.RunSharedCacheCleanerTaskRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hadoop.yarn.RunSharedCacheCleanerTaskRequestProto} */ public static final class RunSharedCacheCleanerTaskRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.RunSharedCacheCleanerTaskRequestProto) RunSharedCacheCleanerTaskRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use RunSharedCacheCleanerTaskRequestProto.newBuilder() to construct. 
private RunSharedCacheCleanerTaskRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private RunSharedCacheCleanerTaskRequestProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RunSharedCacheCleanerTaskRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto.Builder.class); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom( java.nio.ByteBuffer data, 
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.RunSharedCacheCleanerTaskRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.RunSharedCacheCleanerTaskRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public 
org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return 
super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.RunSharedCacheCleanerTaskRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.RunSharedCacheCleanerTaskRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public RunSharedCacheCleanerTaskRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new RunSharedCacheCleanerTaskRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface RunSharedCacheCleanerTaskResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.RunSharedCacheCleanerTaskResponseProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional bool accepted = 1; */ boolean hasAccepted(); /** * optional bool accepted = 1; */ boolean getAccepted(); } /** * Protobuf type {@code hadoop.yarn.RunSharedCacheCleanerTaskResponseProto} */ public static final class 
RunSharedCacheCleanerTaskResponseProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.RunSharedCacheCleanerTaskResponseProto) RunSharedCacheCleanerTaskResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use RunSharedCacheCleanerTaskResponseProto.newBuilder() to construct. private RunSharedCacheCleanerTaskResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private RunSharedCacheCleanerTaskResponseProto() { } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RunSharedCacheCleanerTaskResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { bitField0_ |= 0x00000001; accepted_ = input.readBool(); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { 
return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto.Builder.class); } private int bitField0_; public static final int ACCEPTED_FIELD_NUMBER = 1; private boolean accepted_; /** * optional bool accepted = 1; */ public boolean hasAccepted() { return ((bitField0_ & 0x00000001) != 0); } /** * optional bool accepted = 1; */ public boolean getAccepted() { return accepted_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeBool(1, accepted_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(1, accepted_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto)) { 
return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto) obj; if (hasAccepted() != other.hasAccepted()) return false; if (hasAccepted()) { if (getAccepted() != other.getAccepted()) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasAccepted()) { hash = (37 * hash) + ACCEPTED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getAccepted()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, 
extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.RunSharedCacheCleanerTaskResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.RunSharedCacheCleanerTaskResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); accepted_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } 
@java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.accepted_ = accepted_; to_bitField0_ |= 0x00000001; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder 
setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto.getDefaultInstance()) return this; if (other.hasAccepted()) { setAccepted(other.getAccepted()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private boolean accepted_ ; /** 
* optional bool accepted = 1; */ public boolean hasAccepted() { return ((bitField0_ & 0x00000001) != 0); } /** * optional bool accepted = 1; */ public boolean getAccepted() { return accepted_; } /** * optional bool accepted = 1; */ public Builder setAccepted(boolean value) { bitField0_ |= 0x00000001; accepted_ = value; onChanged(); return this; } /** * optional bool accepted = 1; */ public Builder clearAccepted() { bitField0_ = (bitField0_ & ~0x00000001); accepted_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.RunSharedCacheCleanerTaskResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.RunSharedCacheCleanerTaskResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public RunSharedCacheCleanerTaskResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new RunSharedCacheCleanerTaskResponseProto(input, 
extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.RunSharedCacheCleanerTaskResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetLocalizationStatusesRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetLocalizationStatusesRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ java.util.List getContainerIdList(); /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index); /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ int getContainerIdCount(); /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ java.util.List getContainerIdOrBuilderList(); /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder( int index); } /** *
   * Localization
   * 
* * Protobuf type {@code hadoop.yarn.GetLocalizationStatusesRequestProto} */ public static final class GetLocalizationStatusesRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetLocalizationStatusesRequestProto) GetLocalizationStatusesRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetLocalizationStatusesRequestProto.newBuilder() to construct. private GetLocalizationStatusesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetLocalizationStatusesRequestProto() { containerId_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetLocalizationStatusesRequestProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { containerId_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } containerId_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch 
(java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { containerId_ = java.util.Collections.unmodifiableList(containerId_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto.Builder.class); } public static final int CONTAINER_ID_FIELD_NUMBER = 1; private java.util.List containerId_; /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public java.util.List getContainerIdList() { return containerId_; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public java.util.List getContainerIdOrBuilderList() { return containerId_; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public int getContainerIdCount() { return containerId_.size(); } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index) { return containerId_.get(index); } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder( int index) { return containerId_.get(index); } private byte 
memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < containerId_.size(); i++) { output.writeMessage(1, containerId_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < containerId_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, containerId_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto) obj; if (!getContainerIdList() .equals(other.getContainerIdList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getContainerIdCount() > 0) { hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER; hash = (53 * hash) + getContainerIdList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom( java.nio.ByteBuffer data) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto 
prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** *
     * Localization
     * 
* * Protobuf type {@code hadoop.yarn.GetLocalizationStatusesRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetLocalizationStatusesRequestProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getContainerIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (containerIdBuilder_ == null) { containerId_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { containerIdBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto(this); int from_bitField0_ = bitField0_; if (containerIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { containerId_ = java.util.Collections.unmodifiableList(containerId_); bitField0_ = (bitField0_ & ~0x00000001); } result.containerId_ = containerId_; } else { result.containerId_ = containerIdBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto.getDefaultInstance()) return this; if (containerIdBuilder_ == null) { if (!other.containerId_.isEmpty()) { if (containerId_.isEmpty()) { containerId_ = other.containerId_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureContainerIdIsMutable(); containerId_.addAll(other.containerId_); } onChanged(); } } else { if (!other.containerId_.isEmpty()) { if (containerIdBuilder_.isEmpty()) { containerIdBuilder_.dispose(); containerIdBuilder_ = null; containerId_ = other.containerId_; bitField0_ = (bitField0_ & ~0x00000001); containerIdBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getContainerIdFieldBuilder() : null; } else { containerIdBuilder_.addAllMessages(other.containerId_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List containerId_ = java.util.Collections.emptyList(); private void ensureContainerIdIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { containerId_ = new java.util.ArrayList(containerId_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_; /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public java.util.List getContainerIdList() { if (containerIdBuilder_ == null) { return java.util.Collections.unmodifiableList(containerId_); } else { return containerIdBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public int getContainerIdCount() { if (containerIdBuilder_ == null) { return containerId_.size(); } else { return containerIdBuilder_.getCount(); 
} } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(int index) { if (containerIdBuilder_ == null) { return containerId_.get(index); } else { return containerIdBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerIdIsMutable(); containerId_.set(index, value); onChanged(); } else { containerIdBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (containerIdBuilder_ == null) { ensureContainerIdIsMutable(); containerId_.set(index, builderForValue.build()); onChanged(); } else { containerIdBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder addContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerIdIsMutable(); containerId_.add(value); onChanged(); } else { containerIdBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder addContainerId( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerIdIsMutable(); containerId_.add(index, value); onChanged(); } else { containerIdBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder 
addContainerId( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (containerIdBuilder_ == null) { ensureContainerIdIsMutable(); containerId_.add(builderForValue.build()); onChanged(); } else { containerIdBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder addContainerId( int index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (containerIdBuilder_ == null) { ensureContainerIdIsMutable(); containerId_.add(index, builderForValue.build()); onChanged(); } else { containerIdBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder addAllContainerId( java.lang.Iterable values) { if (containerIdBuilder_ == null) { ensureContainerIdIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, containerId_); onChanged(); } else { containerIdBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder clearContainerId() { if (containerIdBuilder_ == null) { containerId_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { containerIdBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder removeContainerId(int index) { if (containerIdBuilder_ == null) { ensureContainerIdIsMutable(); containerId_.remove(index); onChanged(); } else { containerIdBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder( int index) { return getContainerIdFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder( int index) { if (containerIdBuilder_ == null) { return containerId_.get(index); } else { return containerIdBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public java.util.List getContainerIdOrBuilderList() { if (containerIdBuilder_ != null) { return containerIdBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(containerId_); } } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addContainerIdBuilder() { return getContainerIdFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder addContainerIdBuilder( int index) { return getContainerIdFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerIdProto container_id = 1; */ public java.util.List getContainerIdBuilderList() { return getContainerIdFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getContainerIdFieldBuilder() { if (containerIdBuilder_ == null) { containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( containerId_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); 
containerId_ = null; } return containerIdBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetLocalizationStatusesRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetLocalizationStatusesRequestProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetLocalizationStatusesRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetLocalizationStatusesRequestProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetLocalizationStatusesResponseProtoOrBuilder extends // 
@@protoc_insertion_point(interface_extends:hadoop.yarn.GetLocalizationStatusesResponseProto)
    // NOTE(review): this file is protoc-generated ("DO NOT EDIT" header) — change
    // yarn_service_protos.proto and regenerate instead of hand-editing.
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  // Read-only accessor contract for the two repeated fields of
  // GetLocalizationStatusesResponseProto (per-container localization statuses,
  // plus per-container failures keyed by ContainerExceptionMapProto).
  //
  // NOTE(review): generic type arguments (e.g. java.util.List<
  // ContainerLocalizationStatusesProto>) appear to have been stripped from these
  // signatures during extraction; presumably the real generated source carries
  // them — verify against the original file, since raw types here would not
  // match the generated message class.

  /**
   * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;
   */
  java.util.List getCntnLocalizationStatusesList();

  /**
   * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;
   */
  org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto getCntnLocalizationStatuses(int index);

  /**
   * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;
   */
  int getCntnLocalizationStatusesCount();

  /**
   * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;
   */
  java.util.List
      getCntnLocalizationStatusesOrBuilderList();

  /**
   * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1;
   */
  org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProtoOrBuilder getCntnLocalizationStatusesOrBuilder(
      int index);

  /**
   * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;
   */
  java.util.List getFailedRequestsList();

  /**
   * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;
   */
  org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index);

  /**
   * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;
   */
  int getFailedRequestsCount();

  /**
   * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;
   */
  java.util.List
      getFailedRequestsOrBuilderList();

  /**
   * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2;
   */
  org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder(
      int index);
}

/**
 * Protobuf type {@code hadoop.yarn.GetLocalizationStatusesResponseProto}
 */
public static final class GetLocalizationStatusesResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.GetLocalizationStatusesResponseProto) GetLocalizationStatusesResponseProtoOrBuilder { private static final long serialVersionUID = 0L; // Use GetLocalizationStatusesResponseProto.newBuilder() to construct. private GetLocalizationStatusesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private GetLocalizationStatusesResponseProto() { cntnLocalizationStatuses_ = java.util.Collections.emptyList(); failedRequests_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetLocalizationStatusesResponseProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { cntnLocalizationStatuses_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } cntnLocalizationStatuses_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.PARSER, extensionRegistry)); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) != 0)) { failedRequests_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } failedRequests_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.PARSER, 
extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { cntnLocalizationStatuses_ = java.util.Collections.unmodifiableList(cntnLocalizationStatuses_); } if (((mutable_bitField0_ & 0x00000002) != 0)) { failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto.Builder.class); } public static final int CNTN_LOCALIZATION_STATUSES_FIELD_NUMBER = 1; private java.util.List cntnLocalizationStatuses_; /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public java.util.List getCntnLocalizationStatusesList() { return cntnLocalizationStatuses_; } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public java.util.List getCntnLocalizationStatusesOrBuilderList() { return 
cntnLocalizationStatuses_; } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public int getCntnLocalizationStatusesCount() { return cntnLocalizationStatuses_.size(); } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto getCntnLocalizationStatuses(int index) { return cntnLocalizationStatuses_.get(index); } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProtoOrBuilder getCntnLocalizationStatusesOrBuilder( int index) { return cntnLocalizationStatuses_.get(index); } public static final int FAILED_REQUESTS_FIELD_NUMBER = 2; private java.util.List failedRequests_; /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsList() { return failedRequests_; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsOrBuilderList() { return failedRequests_; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public int getFailedRequestsCount() { return failedRequests_.size(); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) { return failedRequests_.get(index); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder( int index) { return failedRequests_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return 
true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < cntnLocalizationStatuses_.size(); i++) { output.writeMessage(1, cntnLocalizationStatuses_.get(i)); } for (int i = 0; i < failedRequests_.size(); i++) { output.writeMessage(2, failedRequests_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < cntnLocalizationStatuses_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, cntnLocalizationStatuses_.get(i)); } for (int i = 0; i < failedRequests_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, failedRequests_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto) obj; if (!getCntnLocalizationStatusesList() .equals(other.getCntnLocalizationStatusesList())) return false; if (!getFailedRequestsList() .equals(other.getFailedRequestsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getCntnLocalizationStatusesCount() > 0) { hash = (37 * hash) + CNTN_LOCALIZATION_STATUSES_FIELD_NUMBER; hash = (53 * hash) 
+ getCntnLocalizationStatusesList().hashCode(); } if (getFailedRequestsCount() > 0) { hash = (37 * hash) + FAILED_REQUESTS_FIELD_NUMBER; hash = (53 * hash) + getFailedRequestsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.GetLocalizationStatusesResponseProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetLocalizationStatusesResponseProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto.Builder.class); } // Construct using 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getCntnLocalizationStatusesFieldBuilder(); getFailedRequestsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (cntnLocalizationStatusesBuilder_ == null) { cntnLocalizationStatuses_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { cntnLocalizationStatusesBuilder_.clear(); } if (failedRequestsBuilder_ == null) { failedRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { failedRequestsBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto buildPartial() { 
org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto(this); int from_bitField0_ = bitField0_; if (cntnLocalizationStatusesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { cntnLocalizationStatuses_ = java.util.Collections.unmodifiableList(cntnLocalizationStatuses_); bitField0_ = (bitField0_ & ~0x00000001); } result.cntnLocalizationStatuses_ = cntnLocalizationStatuses_; } else { result.cntnLocalizationStatuses_ = cntnLocalizationStatusesBuilder_.build(); } if (failedRequestsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { failedRequests_ = java.util.Collections.unmodifiableList(failedRequests_); bitField0_ = (bitField0_ & ~0x00000002); } result.failedRequests_ = failedRequests_; } else { result.failedRequests_ = failedRequestsBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto.getDefaultInstance()) return this; if (cntnLocalizationStatusesBuilder_ == null) { if (!other.cntnLocalizationStatuses_.isEmpty()) { if (cntnLocalizationStatuses_.isEmpty()) { cntnLocalizationStatuses_ = other.cntnLocalizationStatuses_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureCntnLocalizationStatusesIsMutable(); cntnLocalizationStatuses_.addAll(other.cntnLocalizationStatuses_); } onChanged(); } } else { if (!other.cntnLocalizationStatuses_.isEmpty()) { if (cntnLocalizationStatusesBuilder_.isEmpty()) { cntnLocalizationStatusesBuilder_.dispose(); cntnLocalizationStatusesBuilder_ = null; cntnLocalizationStatuses_ = other.cntnLocalizationStatuses_; bitField0_ = (bitField0_ & ~0x00000001); cntnLocalizationStatusesBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getCntnLocalizationStatusesFieldBuilder() : null; } else { cntnLocalizationStatusesBuilder_.addAllMessages(other.cntnLocalizationStatuses_); } } } if (failedRequestsBuilder_ == null) { if (!other.failedRequests_.isEmpty()) { if (failedRequests_.isEmpty()) { failedRequests_ = other.failedRequests_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureFailedRequestsIsMutable(); failedRequests_.addAll(other.failedRequests_); } onChanged(); } } else { if (!other.failedRequests_.isEmpty()) { if (failedRequestsBuilder_.isEmpty()) { failedRequestsBuilder_.dispose(); failedRequestsBuilder_ = null; failedRequests_ = other.failedRequests_; bitField0_ = (bitField0_ & ~0x00000002); failedRequestsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getFailedRequestsFieldBuilder() : null; } else { failedRequestsBuilder_.addAllMessages(other.failedRequests_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List cntnLocalizationStatuses_ = java.util.Collections.emptyList(); private void ensureCntnLocalizationStatusesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { 
cntnLocalizationStatuses_ = new java.util.ArrayList(cntnLocalizationStatuses_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProtoOrBuilder> cntnLocalizationStatusesBuilder_; /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public java.util.List getCntnLocalizationStatusesList() { if (cntnLocalizationStatusesBuilder_ == null) { return java.util.Collections.unmodifiableList(cntnLocalizationStatuses_); } else { return cntnLocalizationStatusesBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public int getCntnLocalizationStatusesCount() { if (cntnLocalizationStatusesBuilder_ == null) { return cntnLocalizationStatuses_.size(); } else { return cntnLocalizationStatusesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto getCntnLocalizationStatuses(int index) { if (cntnLocalizationStatusesBuilder_ == null) { return cntnLocalizationStatuses_.get(index); } else { return cntnLocalizationStatusesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public Builder setCntnLocalizationStatuses( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto value) { if (cntnLocalizationStatusesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCntnLocalizationStatusesIsMutable(); cntnLocalizationStatuses_.set(index, value); onChanged(); } else { 
cntnLocalizationStatusesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public Builder setCntnLocalizationStatuses( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder builderForValue) { if (cntnLocalizationStatusesBuilder_ == null) { ensureCntnLocalizationStatusesIsMutable(); cntnLocalizationStatuses_.set(index, builderForValue.build()); onChanged(); } else { cntnLocalizationStatusesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public Builder addCntnLocalizationStatuses(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto value) { if (cntnLocalizationStatusesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCntnLocalizationStatusesIsMutable(); cntnLocalizationStatuses_.add(value); onChanged(); } else { cntnLocalizationStatusesBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public Builder addCntnLocalizationStatuses( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto value) { if (cntnLocalizationStatusesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCntnLocalizationStatusesIsMutable(); cntnLocalizationStatuses_.add(index, value); onChanged(); } else { cntnLocalizationStatusesBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public Builder addCntnLocalizationStatuses( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder builderForValue) { if (cntnLocalizationStatusesBuilder_ == null) { ensureCntnLocalizationStatusesIsMutable(); 
cntnLocalizationStatuses_.add(builderForValue.build()); onChanged(); } else { cntnLocalizationStatusesBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public Builder addCntnLocalizationStatuses( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder builderForValue) { if (cntnLocalizationStatusesBuilder_ == null) { ensureCntnLocalizationStatusesIsMutable(); cntnLocalizationStatuses_.add(index, builderForValue.build()); onChanged(); } else { cntnLocalizationStatusesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public Builder addAllCntnLocalizationStatuses( java.lang.Iterable values) { if (cntnLocalizationStatusesBuilder_ == null) { ensureCntnLocalizationStatusesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, cntnLocalizationStatuses_); onChanged(); } else { cntnLocalizationStatusesBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public Builder clearCntnLocalizationStatuses() { if (cntnLocalizationStatusesBuilder_ == null) { cntnLocalizationStatuses_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { cntnLocalizationStatusesBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public Builder removeCntnLocalizationStatuses(int index) { if (cntnLocalizationStatusesBuilder_ == null) { ensureCntnLocalizationStatusesIsMutable(); cntnLocalizationStatuses_.remove(index); onChanged(); } else { cntnLocalizationStatusesBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto 
cntn_localization_statuses = 1; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder getCntnLocalizationStatusesBuilder( int index) { return getCntnLocalizationStatusesFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProtoOrBuilder getCntnLocalizationStatusesOrBuilder( int index) { if (cntnLocalizationStatusesBuilder_ == null) { return cntnLocalizationStatuses_.get(index); } else { return cntnLocalizationStatusesBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public java.util.List getCntnLocalizationStatusesOrBuilderList() { if (cntnLocalizationStatusesBuilder_ != null) { return cntnLocalizationStatusesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(cntnLocalizationStatuses_); } } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder addCntnLocalizationStatusesBuilder() { return getCntnLocalizationStatusesFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder addCntnLocalizationStatusesBuilder( int index) { return getCntnLocalizationStatusesFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerLocalizationStatusesProto cntn_localization_statuses = 1; */ public java.util.List 
getCntnLocalizationStatusesBuilderList() { return getCntnLocalizationStatusesFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProtoOrBuilder> getCntnLocalizationStatusesFieldBuilder() { if (cntnLocalizationStatusesBuilder_ == null) { cntnLocalizationStatusesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProtoOrBuilder>( cntnLocalizationStatuses_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); cntnLocalizationStatuses_ = null; } return cntnLocalizationStatusesBuilder_; } private java.util.List failedRequests_ = java.util.Collections.emptyList(); private void ensureFailedRequestsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { failedRequests_ = new java.util.ArrayList(failedRequests_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> failedRequestsBuilder_; /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsList() { if (failedRequestsBuilder_ == null) { return java.util.Collections.unmodifiableList(failedRequests_); } else { return failedRequestsBuilder_.getMessageList(); } } /** * repeated 
.hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public int getFailedRequestsCount() { if (failedRequestsBuilder_ == null) { return failedRequests_.size(); } else { return failedRequestsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto getFailedRequests(int index) { if (failedRequestsBuilder_ == null) { return failedRequests_.get(index); } else { return failedRequestsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder setFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.set(index, value); onChanged(); } else { failedRequestsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder setFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.set(index, builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addFailedRequests(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.add(value); onChanged(); } else { failedRequestsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder 
addFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto value) { if (failedRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFailedRequestsIsMutable(); failedRequests_.add(index, value); onChanged(); } else { failedRequestsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addFailedRequests( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.add(builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addFailedRequests( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder builderForValue) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.add(index, builderForValue.build()); onChanged(); } else { failedRequestsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder addAllFailedRequests( java.lang.Iterable values) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, failedRequests_); onChanged(); } else { failedRequestsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder clearFailedRequests() { if (failedRequestsBuilder_ == null) { failedRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { failedRequestsBuilder_.clear(); } return this; } /** * repeated 
.hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public Builder removeFailedRequests(int index) { if (failedRequestsBuilder_ == null) { ensureFailedRequestsIsMutable(); failedRequests_.remove(index); onChanged(); } else { failedRequestsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder getFailedRequestsBuilder( int index) { return getFailedRequestsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder getFailedRequestsOrBuilder( int index) { if (failedRequestsBuilder_ == null) { return failedRequests_.get(index); } else { return failedRequestsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public java.util.List getFailedRequestsOrBuilderList() { if (failedRequestsBuilder_ != null) { return failedRequestsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(failedRequests_); } } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder() { return getFailedRequestsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto failed_requests = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder addFailedRequestsBuilder( int index) { return getFailedRequestsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ContainerExceptionMapProto 
failed_requests = 2; */ public java.util.List getFailedRequestsBuilderList() { return getFailedRequestsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder> getFailedRequestsFieldBuilder() { if (failedRequestsBuilder_ == null) { failedRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerExceptionMapProtoOrBuilder>( failedRequests_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); failedRequests_ = null; } return failedRequestsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetLocalizationStatusesResponseProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.GetLocalizationStatusesResponseProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final 
org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public GetLocalizationStatusesResponseProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new GetLocalizationStatusesResponseProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLocalizationStatusesResponseProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface LocalizationStatusProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.LocalizationStatusProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional string resource_key = 1; */ boolean hasResourceKey(); /** * optional string resource_key = 1; */ java.lang.String getResourceKey(); /** * optional string resource_key = 1; */ org.apache.hadoop.thirdparty.protobuf.ByteString getResourceKeyBytes(); /** * optional .hadoop.yarn.LocalizationStateProto localization_state = 2; */ boolean hasLocalizationState(); /** * optional .hadoop.yarn.LocalizationStateProto localization_state = 2; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto getLocalizationState(); /** * optional string diagnostics = 3; */ boolean hasDiagnostics(); /** * optional string diagnostics = 3; */ java.lang.String getDiagnostics(); /** * optional string diagnostics = 3; */ org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsBytes(); } /** * Protobuf type {@code hadoop.yarn.LocalizationStatusProto} */ public static final class LocalizationStatusProto extends 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.LocalizationStatusProto) LocalizationStatusProtoOrBuilder { private static final long serialVersionUID = 0L; // Use LocalizationStatusProto.newBuilder() to construct. private LocalizationStatusProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private LocalizationStatusProto() { resourceKey_ = ""; localizationState_ = 1; diagnostics_ = ""; } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private LocalizationStatusProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; resourceKey_ = bs; break; } case 16: { int rawValue = input.readEnum(); @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto value = org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(2, rawValue); } else { bitField0_ |= 0x00000002; localizationState_ = rawValue; } break; } case 26: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; diagnostics_ = bs; break; } default: { if (!parseUnknownField( input, 
unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_LocalizationStatusProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_LocalizationStatusProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder.class); } private int bitField0_; public static final int RESOURCE_KEY_FIELD_NUMBER = 1; private volatile java.lang.Object resourceKey_; /** * optional string resource_key = 1; */ public boolean hasResourceKey() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string resource_key = 1; */ public java.lang.String getResourceKey() { java.lang.Object ref = resourceKey_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { resourceKey_ = s; } return s; } } /** * optional string resource_key = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getResourceKeyBytes() { java.lang.Object ref = resourceKey_; if (ref instanceof java.lang.String) { 
org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceKey_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int LOCALIZATION_STATE_FIELD_NUMBER = 2; private int localizationState_; /** * optional .hadoop.yarn.LocalizationStateProto localization_state = 2; */ public boolean hasLocalizationState() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.LocalizationStateProto localization_state = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto getLocalizationState() { @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto result = org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto.valueOf(localizationState_); return result == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto.L_PENDING : result; } public static final int DIAGNOSTICS_FIELD_NUMBER = 3; private volatile java.lang.Object diagnostics_; /** * optional string diagnostics = 3; */ public boolean hasDiagnostics() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string diagnostics = 3; */ public java.lang.String getDiagnostics() { java.lang.Object ref = diagnostics_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { diagnostics_ = s; } return s; } } /** * optional string diagnostics = 3; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsBytes() { java.lang.Object ref = diagnostics_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); diagnostics_ = b; return 
b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, resourceKey_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeEnum(2, localizationState_); } if (((bitField0_ & 0x00000004) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, diagnostics_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, resourceKey_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(2, localizationState_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, diagnostics_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto) obj; if (hasResourceKey() != other.hasResourceKey()) return false; if (hasResourceKey()) { if (!getResourceKey() 
.equals(other.getResourceKey())) return false; } if (hasLocalizationState() != other.hasLocalizationState()) return false; if (hasLocalizationState()) { if (localizationState_ != other.localizationState_) return false; } if (hasDiagnostics() != other.hasDiagnostics()) return false; if (hasDiagnostics()) { if (!getDiagnostics() .equals(other.getDiagnostics())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasResourceKey()) { hash = (37 * hash) + RESOURCE_KEY_FIELD_NUMBER; hash = (53 * hash) + getResourceKey().hashCode(); } if (hasLocalizationState()) { hash = (37 * hash) + LOCALIZATION_STATE_FIELD_NUMBER; hash = (53 * hash) + localizationState_; } if (hasDiagnostics()) { hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER; hash = (53 * hash) + getDiagnostics().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto 
parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 
.parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.LocalizationStatusProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.LocalizationStatusProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_LocalizationStatusProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_LocalizationStatusProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); resourceKey_ = ""; bitField0_ = (bitField0_ & ~0x00000001); localizationState_ = 1; bitField0_ = (bitField0_ & ~0x00000002); diagnostics_ = ""; bitField0_ = (bitField0_ & ~0x00000004); return this; } 
@java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_LocalizationStatusProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto result = new org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.resourceKey_ = resourceKey_; if (((from_bitField0_ & 0x00000002) != 0)) { to_bitField0_ |= 0x00000002; } result.localizationState_ = localizationState_; if (((from_bitField0_ & 0x00000004) != 0)) { to_bitField0_ |= 0x00000004; } result.diagnostics_ = diagnostics_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto other) { if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.getDefaultInstance()) return this; if (other.hasResourceKey()) { bitField0_ |= 0x00000001; resourceKey_ = other.resourceKey_; onChanged(); } if (other.hasLocalizationState()) { setLocalizationState(other.getLocalizationState()); } if (other.hasDiagnostics()) { bitField0_ |= 0x00000004; diagnostics_ = other.diagnostics_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object resourceKey_ = ""; /** * optional string resource_key = 1; */ public boolean hasResourceKey() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string resource_key = 1; */ public java.lang.String getResourceKey() { java.lang.Object ref = resourceKey_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { resourceKey_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string resource_key = 1; */ public org.apache.hadoop.thirdparty.protobuf.ByteString getResourceKeyBytes() { java.lang.Object ref = resourceKey_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceKey_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string resource_key = 1; */ public Builder setResourceKey( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; resourceKey_ = value; onChanged(); return this; } /** * optional string resource_key = 1; */ public Builder clearResourceKey() { bitField0_ = (bitField0_ & ~0x00000001); resourceKey_ = getDefaultInstance().getResourceKey(); onChanged(); return this; } /** * optional string resource_key = 1; */ public Builder setResourceKeyBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; 
resourceKey_ = value; onChanged(); return this; } private int localizationState_ = 1; /** * optional .hadoop.yarn.LocalizationStateProto localization_state = 2; */ public boolean hasLocalizationState() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.LocalizationStateProto localization_state = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto getLocalizationState() { @SuppressWarnings("deprecation") org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto result = org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto.valueOf(localizationState_); return result == null ? org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto.L_PENDING : result; } /** * optional .hadoop.yarn.LocalizationStateProto localization_state = 2; */ public Builder setLocalizationState(org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; localizationState_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.LocalizationStateProto localization_state = 2; */ public Builder clearLocalizationState() { bitField0_ = (bitField0_ & ~0x00000002); localizationState_ = 1; onChanged(); return this; } private java.lang.Object diagnostics_ = ""; /** * optional string diagnostics = 3; */ public boolean hasDiagnostics() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string diagnostics = 3; */ public java.lang.String getDiagnostics() { java.lang.Object ref = diagnostics_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { diagnostics_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string diagnostics = 3; */ public org.apache.hadoop.thirdparty.protobuf.ByteString 
getDiagnosticsBytes() { java.lang.Object ref = diagnostics_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); diagnostics_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string diagnostics = 3; */ public Builder setDiagnostics( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; diagnostics_ = value; onChanged(); return this; } /** * optional string diagnostics = 3; */ public Builder clearDiagnostics() { bitField0_ = (bitField0_ & ~0x00000004); diagnostics_ = getDefaultInstance().getDiagnostics(); onChanged(); return this; } /** * optional string diagnostics = 3; */ public Builder setDiagnosticsBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; diagnostics_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.LocalizationStatusProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.LocalizationStatusProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final 
org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public LocalizationStatusProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new LocalizationStatusProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ContainerLocalizationStatusesProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerLocalizationStatusesProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ boolean hasContainerId(); /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId(); /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder(); /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ java.util.List getLocalizationStatusesList(); /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto getLocalizationStatuses(int index); /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ int getLocalizationStatusesCount(); /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ java.util.List 
getLocalizationStatusesOrBuilderList(); /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProtoOrBuilder getLocalizationStatusesOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.ContainerLocalizationStatusesProto} */ public static final class ContainerLocalizationStatusesProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerLocalizationStatusesProto) ContainerLocalizationStatusesProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ContainerLocalizationStatusesProto.newBuilder() to construct. private ContainerLocalizationStatusesProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ContainerLocalizationStatusesProto() { localizationStatuses_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ContainerLocalizationStatusesProto( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder subBuilder = null; if (((bitField0_ & 0x00000001) != 0)) { subBuilder = containerId_.toBuilder(); } containerId_ = 
input.readMessage(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(containerId_); containerId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) != 0)) { localizationStatuses_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000002; } localizationStatuses_.add( input.readMessage(org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.PARSER, extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) != 0)) { localizationStatuses_ = java.util.Collections.unmodifiableList(localizationStatuses_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder.class); } private int bitField0_; public static final int CONTAINER_ID_FIELD_NUMBER = 1; private 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_; /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public boolean hasContainerId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } public static final int LOCALIZATION_STATUSES_FIELD_NUMBER = 2; private java.util.List localizationStatuses_; /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public java.util.List getLocalizationStatusesList() { return localizationStatuses_; } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public java.util.List getLocalizationStatusesOrBuilderList() { return localizationStatuses_; } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public int getLocalizationStatusesCount() { return localizationStatuses_.size(); } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto getLocalizationStatuses(int index) { return localizationStatuses_.get(index); } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProtoOrBuilder getLocalizationStatusesOrBuilder( int index) { return localizationStatuses_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() 
{ byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getContainerId()); } for (int i = 0; i < localizationStatuses_.size(); i++) { output.writeMessage(2, localizationStatuses_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getContainerId()); } for (int i = 0; i < localizationStatuses_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, localizationStatuses_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto other = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto) obj; if (hasContainerId() != other.hasContainerId()) return false; if (hasContainerId()) { if (!getContainerId() .equals(other.getContainerId())) return false; } if (!getLocalizationStatusesList() .equals(other.getLocalizationStatusesList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasContainerId()) { hash = (37 * 
hash) + CONTAINER_ID_FIELD_NUMBER; hash = (53 * hash) + getContainerId().hashCode(); } if (getLocalizationStatusesCount() > 0) { hash = (37 * hash) + LOCALIZATION_STATUSES_FIELD_NUMBER; hash = (53 * hash) + getLocalizationStatusesList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) 
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ContainerLocalizationStatusesProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerLocalizationStatusesProto) org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProtoOrBuilder { public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_descriptor; } @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.class, org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.Builder.class); } // Construct using 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getContainerIdFieldBuilder(); getLocalizationStatusesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (containerIdBuilder_ == null) { containerId_ = null; } else { containerIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (localizationStatusesBuilder_ == null) { localizationStatuses_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { localizationStatusesBuilder_.clear(); } return this; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_descriptor; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto build() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto buildPartial() { org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto result = new 
org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { if (containerIdBuilder_ == null) { result.containerId_ = containerId_; } else { result.containerId_ = containerIdBuilder_.build(); } to_bitField0_ |= 0x00000001; } if (localizationStatusesBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { localizationStatuses_ = java.util.Collections.unmodifiableList(localizationStatuses_); bitField0_ = (bitField0_ & ~0x00000002); } result.localizationStatuses_ = localizationStatuses_; } else { result.localizationStatuses_ = localizationStatusesBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto) { return 
mergeFrom((org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto)other);
        // (tail of mergeFrom(Message): typed overload was dispatched above; otherwise
        // fall back to the reflective merge in the superclass)
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /**
       * Merges {@code other} into this builder.  The {@code container_id} field is
       * merged field-wise via {@link #mergeContainerId}; the repeated
       * {@code localization_statuses} entries are appended (protobuf repeated-field
       * merge semantics).  No-op when {@code other} is the default instance.
       */
      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto.getDefaultInstance()) return this;
        if (other.hasContainerId()) {
          mergeContainerId(other.getContainerId());
        }
        if (localizationStatusesBuilder_ == null) {
          // Plain-list mode: adopt other's (immutable) list when ours is empty,
          // otherwise copy-on-write and append.
          if (!other.localizationStatuses_.isEmpty()) {
            if (localizationStatuses_.isEmpty()) {
              localizationStatuses_ = other.localizationStatuses_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureLocalizationStatusesIsMutable();
              localizationStatuses_.addAll(other.localizationStatuses_);
            }
            onChanged();
          }
        } else {
          // Field-builder mode: when our builder holds nothing, dispose it and adopt
          // other's list directly; otherwise append through the builder.
          if (!other.localizationStatuses_.isEmpty()) {
            if (localizationStatusesBuilder_.isEmpty()) {
              localizationStatusesBuilder_.dispose();
              localizationStatusesBuilder_ = null;
              localizationStatuses_ = other.localizationStatuses_;
              bitField0_ = (bitField0_ & ~0x00000002);
              localizationStatusesBuilder_ =
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getLocalizationStatusesFieldBuilder() : null;
            } else {
              localizationStatusesBuilder_.addAllMessages(other.localizationStatuses_);
            }
          }
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      /** Always true: this message declares no required fields. */
      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      /**
       * Parses a message from {@code input} and merges it into this builder.
       * On a parse error the partially-read message (if any) is still merged
       * in the finally-block before the failure is rethrown as an IOException.
       */
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // Presence bits: 0x00000001 tracks container_id, 0x00000002 tracks
      // localization_statuses (see the accessors below).
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public boolean hasContainerId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
        if (containerIdBuilder_ == null) {
          return containerId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        } else {
          return containerIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerId_ = value;
          onChanged();
        } else {
          containerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public Builder setContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          containerId_ = builderForValue.build();
          onChanged();
        } else {
          containerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       *
       * Field-wise merge: when a non-default container_id is already present it is
       * merged with {@code value}; otherwise {@code value} replaces it outright.
       */
      public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
              containerId_ != null &&
              containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            containerId_ =
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.newBuilder(containerId_).mergeFrom(value).buildPartial();
          } else {
            containerId_ = value;
          }
          onChanged();
        } else {
          containerIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public Builder clearContainerId() {
        if (containerIdBuilder_ == null) {
          containerId_ = null;
          onChanged();
        } else {
          containerIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto container_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
        bitField0_ |= 0x00000001;
onChanged(); return getContainerIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { if (containerIdBuilder_ != null) { return containerIdBuilder_.getMessageOrBuilder(); } else { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getContainerIdFieldBuilder() { if (containerIdBuilder_ == null) { containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( getContainerId(), getParentForChildren(), isClean()); containerId_ = null; } return containerIdBuilder_; } private java.util.List localizationStatuses_ = java.util.Collections.emptyList(); private void ensureLocalizationStatusesIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { localizationStatuses_ = new java.util.ArrayList(localizationStatuses_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProtoOrBuilder> localizationStatusesBuilder_; /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public java.util.List getLocalizationStatusesList() { if 
(localizationStatusesBuilder_ == null) { return java.util.Collections.unmodifiableList(localizationStatuses_); } else { return localizationStatusesBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public int getLocalizationStatusesCount() { if (localizationStatusesBuilder_ == null) { return localizationStatuses_.size(); } else { return localizationStatusesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto getLocalizationStatuses(int index) { if (localizationStatusesBuilder_ == null) { return localizationStatuses_.get(index); } else { return localizationStatusesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public Builder setLocalizationStatuses( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto value) { if (localizationStatusesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureLocalizationStatusesIsMutable(); localizationStatuses_.set(index, value); onChanged(); } else { localizationStatusesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public Builder setLocalizationStatuses( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder builderForValue) { if (localizationStatusesBuilder_ == null) { ensureLocalizationStatusesIsMutable(); localizationStatuses_.set(index, builderForValue.build()); onChanged(); } else { localizationStatusesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public Builder addLocalizationStatuses(org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto value) { if 
(localizationStatusesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureLocalizationStatusesIsMutable(); localizationStatuses_.add(value); onChanged(); } else { localizationStatusesBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public Builder addLocalizationStatuses( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto value) { if (localizationStatusesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureLocalizationStatusesIsMutable(); localizationStatuses_.add(index, value); onChanged(); } else { localizationStatusesBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public Builder addLocalizationStatuses( org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder builderForValue) { if (localizationStatusesBuilder_ == null) { ensureLocalizationStatusesIsMutable(); localizationStatuses_.add(builderForValue.build()); onChanged(); } else { localizationStatusesBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public Builder addLocalizationStatuses( int index, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder builderForValue) { if (localizationStatusesBuilder_ == null) { ensureLocalizationStatusesIsMutable(); localizationStatuses_.add(index, builderForValue.build()); onChanged(); } else { localizationStatusesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public Builder addAllLocalizationStatuses( java.lang.Iterable values) { if (localizationStatusesBuilder_ == null) { ensureLocalizationStatusesIsMutable(); 
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, localizationStatuses_); onChanged(); } else { localizationStatusesBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public Builder clearLocalizationStatuses() { if (localizationStatusesBuilder_ == null) { localizationStatuses_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { localizationStatusesBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public Builder removeLocalizationStatuses(int index) { if (localizationStatusesBuilder_ == null) { ensureLocalizationStatusesIsMutable(); localizationStatuses_.remove(index); onChanged(); } else { localizationStatusesBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder getLocalizationStatusesBuilder( int index) { return getLocalizationStatusesFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProtoOrBuilder getLocalizationStatusesOrBuilder( int index) { if (localizationStatusesBuilder_ == null) { return localizationStatuses_.get(index); } else { return localizationStatusesBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public java.util.List getLocalizationStatusesOrBuilderList() { if (localizationStatusesBuilder_ != null) { return localizationStatusesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(localizationStatuses_); } } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public 
org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder addLocalizationStatusesBuilder() { return getLocalizationStatusesFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder addLocalizationStatusesBuilder( int index) { return getLocalizationStatusesFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.LocalizationStatusProto localization_statuses = 2; */ public java.util.List getLocalizationStatusesBuilderList() { return getLocalizationStatusesFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProtoOrBuilder> getLocalizationStatusesFieldBuilder() { if (localizationStatusesBuilder_ == null) { localizationStatusesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProto.Builder, org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStatusProtoOrBuilder>( localizationStatuses_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); localizationStatuses_ = null; } return localizationStatusesBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final 
org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerLocalizationStatusesProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerLocalizationStatusesProto) private static final org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto(); } public static org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ContainerLocalizationStatusesProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return new ContainerLocalizationStatusesProto(input, extensionRegistry); } }; public static org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerLocalizationStatusesProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_fieldAccessorTable; private static final 
// Per-message descriptor / field-accessor-table holders, one pair for each
// message in yarn_service_protos.proto; populated reflectively when the file
// descriptor is initialized.  (This declaration continues the
// `private static final` begun on the previous line, and the final declaration
// below is completed on the following line.)
org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_UpdateContainerRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_UpdateContainerRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_UpdateContainerErrorProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_UpdateContainerErrorProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_AllocateRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_AllocateRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NMTokenProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NMTokenProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_UpdatedContainerProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_UpdatedContainerProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_AllocateResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_AllocateResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetNewApplicationRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetNewApplicationRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetNewApplicationResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetNewApplicationResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetApplicationReportRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetApplicationReportRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetApplicationReportResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetApplicationReportResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_SubmitApplicationRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_SubmitApplicationRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_SubmitApplicationResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_SubmitApplicationResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_KillApplicationRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_KillApplicationRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_KillApplicationResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_KillApplicationResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetClusterMetricsRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetClusterMetricsRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetClusterMetricsResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetClusterMetricsResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetApplicationsRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetApplicationsRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetApplicationsResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetApplicationsResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetClusterNodesRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetClusterNodesRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetClusterNodesResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetClusterNodesResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetQueueInfoRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetQueueInfoRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetQueueInfoResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetQueueInfoResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_fieldAccessorTable;
  private static final
    org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_SignalContainerRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_SignalContainerRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_SignalContainerResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_SignalContainerResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetResourceProfileRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetResourceProfileRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetResourceProfileResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetResourceProfileResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StartContainerRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StartContainerRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StartContainerResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StartContainerResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StopContainerRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StopContainerRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StopContainerResponseProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StopContainerResponseProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ResourceLocalizationRequestProto_descriptor;
  private static final
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_yarn_ResourceLocalizationRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ResourceLocalizationResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ResourceLocalizationResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ReInitializeContainerRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ReInitializeContainerRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ReInitializeContainerResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ReInitializeContainerResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_RestartContainerResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_RestartContainerResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_RollbackResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_RollbackResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_CommitResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_hadoop_yarn_CommitResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_StartContainersRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_StartContainersRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ContainerExceptionMapProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ContainerExceptionMapProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_StartContainersResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_StartContainersResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_StopContainersRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_StopContainersRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_StopContainersResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_StopContainersResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_GetContainerStatusesRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_hadoop_yarn_GetContainerStatusesRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_GetContainerStatusesResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_GetContainerStatusesResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ContainerUpdateRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ContainerUpdateRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ContainerUpdateResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ContainerUpdateResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_descriptor; private static final 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_GetContainerReportRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_GetContainerReportRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_GetContainerReportResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_GetContainerReportResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_GetContainersRequestProto_descriptor; 
private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_GetContainersRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_GetContainersResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_GetContainersResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor 
internal_static_hadoop_yarn_GetNewReservationRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_GetNewReservationRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_GetNewReservationResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_GetNewReservationResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ReservationSubmissionRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ReservationSubmissionRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ReservationSubmissionResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ReservationSubmissionResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ReservationUpdateRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ReservationUpdateRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ReservationUpdateResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ReservationUpdateResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor 
internal_static_hadoop_yarn_ReservationDeleteRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ReservationDeleteRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ReservationDeleteResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ReservationDeleteResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ReservationListRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ReservationListRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ReservationListResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ReservationListResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor 
internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_LocalizationStatusProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_LocalizationStatusProto_fieldAccessorTable; private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_descriptor; private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_fieldAccessorTable; public static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\031yarn_service_protos.proto\022\013hadoop.yarn" + "\032\016Security.proto\032\021yarn_protos.proto\"\253\001\n%" + "RegisterApplicationMasterRequestProto\022\014\n" + "\004host\030\001 \001(\t\022\020\n\010rpc_port\030\002 \001(\005\022\024\n\014trackin" + "g_url\030\003 \001(\t\022L\n\025placement_constraints\030\004 \003" + "(\0132-.hadoop.yarn.PlacementConstraintMapE" + "ntryProto\"\243\004\n&RegisterApplicationMasterR" + "esponseProto\0225\n\021maximumCapability\030\001 
\001(\0132" + "\032.hadoop.yarn.ResourceProto\022%\n\035client_to" + "_am_token_master_key\030\002 \001(\014\022=\n\020applicatio" + "n_ACLs\030\003 \003(\0132#.hadoop.yarn.ApplicationAC" + "LMapProto\022F\n!containers_from_previous_at" + "tempts\030\004 \003(\0132\033.hadoop.yarn.ContainerProt" + "o\022\r\n\005queue\030\005 \001(\t\022C\n nm_tokens_from_previ" + "ous_attempts\030\006 \003(\0132\031.hadoop.yarn.NMToken" + "Proto\022E\n\030scheduler_resource_types\030\007 \003(\0162" + "#.hadoop.yarn.SchedulerResourceTypes\022=\n\021" + "resource_profiles\030\010 \001(\0132\".hadoop.yarn.Re" + "sourceProfilesProto\022:\n\016resource_types\030\t " + "\003(\0132\".hadoop.yarn.ResourceTypeInfoProto\"" + "\234\001\n#FinishApplicationMasterRequestProto\022" + "\023\n\013diagnostics\030\001 \001(\t\022\024\n\014tracking_url\030\002 \001" + "(\t\022J\n\030final_application_status\030\003 \001(\0162(.h" + "adoop.yarn.FinalApplicationStatusProto\"E" + "\n$FinishApplicationMasterResponseProto\022\035" + "\n\016isUnregistered\030\001 \001(\010:\005false\"\222\002\n\033Update" + "ContainerRequestProto\022\031\n\021container_versi" + "on\030\001 \002(\005\0223\n\014container_id\030\002 \002(\0132\035.hadoop." + "yarn.ContainerIdProto\022:\n\013update_type\030\003 \002" + "(\0162%.hadoop.yarn.ContainerUpdateTypeProt" + "o\022.\n\ncapability\030\004 \001(\0132\032.hadoop.yarn.Reso" + "urceProto\0227\n\016execution_type\030\005 \001(\0162\037.hado" + "op.yarn.ExecutionTypeProto\"\220\001\n\031UpdateCon" + "tainerErrorProto\022\016\n\006reason\030\001 \001(\t\022@\n\016upda" + "te_request\030\002 \001(\0132(.hadoop.yarn.UpdateCon" + "tainerRequestProto\022!\n\031current_container_" + "version\030\003 \001(\005\"\377\002\n\024AllocateRequestProto\022." 
+ "\n\003ask\030\001 \003(\0132!.hadoop.yarn.ResourceReques" + "tProto\022.\n\007release\030\002 \003(\0132\035.hadoop.yarn.Co" + "ntainerIdProto\022E\n\021blacklist_request\030\003 \001(" + "\0132*.hadoop.yarn.ResourceBlacklistRequest" + "Proto\022\023\n\013response_id\030\004 \001(\005\022\020\n\010progress\030\005" + " \001(\002\022A\n\017update_requests\030\007 \003(\0132(.hadoop.y" + "arn.UpdateContainerRequestProto\022@\n\023sched" + "uling_requests\030\n \003(\0132#.hadoop.yarn.Sched" + "ulingRequestProto\022\024\n\014tracking_url\030\013 \001(\t\"" + "b\n\014NMTokenProto\022(\n\006nodeId\030\001 \001(\0132\030.hadoop" + ".yarn.NodeIdProto\022(\n\005token\030\002 \001(\0132\031.hadoo" + "p.common.TokenProto\"\203\001\n\025UpdatedContainer" + "Proto\022:\n\013update_type\030\001 \002(\0162%.hadoop.yarn" + ".ContainerUpdateTypeProto\022.\n\tcontainer\030\002" + " \002(\0132\033.hadoop.yarn.ContainerProto\"\376\006\n\025Al" + "locateResponseProto\0220\n\013a_m_command\030\001 \001(\016" + "2\033.hadoop.yarn.AMCommandProto\022\023\n\013respons" + "e_id\030\002 \001(\005\0229\n\024allocated_containers\030\003 \003(\013" + "2\033.hadoop.yarn.ContainerProto\022G\n\034complet" + "ed_container_statuses\030\004 \003(\0132!.hadoop.yar" + "n.ContainerStatusProto\022)\n\005limit\030\005 \001(\0132\032." 
+ "hadoop.yarn.ResourceProto\0223\n\rupdated_nod" + "es\030\006 \003(\0132\034.hadoop.yarn.NodeReportProto\022\031" + "\n\021num_cluster_nodes\030\007 \001(\005\0224\n\007preempt\030\010 \001" + "(\0132#.hadoop.yarn.PreemptionMessageProto\022" + ",\n\tnm_tokens\030\t \003(\0132\031.hadoop.yarn.NMToken" + "Proto\022.\n\013am_rm_token\030\014 \001(\0132\031.hadoop.comm" + "on.TokenProto\0228\n\024application_priority\030\r " + "\001(\0132\032.hadoop.yarn.PriorityProto\0227\n\016colle" + "ctor_info\030\016 \001(\0132\037.hadoop.yarn.CollectorI" + "nfoProto\022=\n\rupdate_errors\030\017 \003(\0132&.hadoop" + ".yarn.UpdateContainerErrorProto\022>\n\022updat" + "ed_containers\030\020 \003(\0132\".hadoop.yarn.Update" + "dContainerProto\022F\n!containers_from_previ" + "ous_attempts\030\021 \003(\0132\033.hadoop.yarn.Contain" + "erProto\022Q\n\034rejected_scheduling_requests\030" + "\022 \003(\0132+.hadoop.yarn.RejectedSchedulingRe" + "questProto\"\037\n\035GetNewApplicationRequestPr" + "oto\"\220\001\n\036GetNewApplicationResponseProto\0227" + "\n\016application_id\030\001 \001(\0132\037.hadoop.yarn.App" + "licationIdProto\0225\n\021maximumCapability\030\002 \001" + "(\0132\032.hadoop.yarn.ResourceProto\"[\n GetApp" + "licationReportRequestProto\0227\n\016applicatio" + "n_id\030\001 \001(\0132\037.hadoop.yarn.ApplicationIdPr" + "oto\"d\n!GetApplicationReportResponseProto" + "\022?\n\022application_report\030\001 \001(\0132#.hadoop.ya" + "rn.ApplicationReportProto\"w\n\035SubmitAppli" + "cationRequestProto\022V\n\036application_submis" + "sion_context\030\001 \001(\0132..hadoop.yarn.Applica" + "tionSubmissionContextProto\" \n\036SubmitAppl" + "icationResponseProto\"l\n\"FailApplicationA" + "ttemptRequestProto\022F\n\026application_attemp" + "t_id\030\001 \001(\0132&.hadoop.yarn.ApplicationAtte" + "mptIdProto\"%\n#FailApplicationAttemptResp" + "onseProto\"k\n\033KillApplicationRequestProto" + "\0227\n\016application_id\030\001 \001(\0132\037.hadoop.yarn.A" + 
"pplicationIdProto\022\023\n\013diagnostics\030\002 \001(\t\"@" + "\n\034KillApplicationResponseProto\022 \n\021is_kil" + "l_completed\030\001 \001(\010:\005false\"\037\n\035GetClusterMe" + "tricsRequestProto\"_\n\036GetClusterMetricsRe" + "sponseProto\022=\n\017cluster_metrics\030\001 \001(\0132$.h" + "adoop.yarn.YarnClusterMetricsProto\"x\n\'Mo" + "veApplicationAcrossQueuesRequestProto\0227\n" + "\016application_id\030\001 \002(\0132\037.hadoop.yarn.Appl" + "icationIdProto\022\024\n\014target_queue\030\002 \002(\t\"*\n(" + "MoveApplicationAcrossQueuesResponseProto" + "\"\343\002\n\033GetApplicationsRequestProto\022\031\n\021appl" + "ication_types\030\001 \003(\t\022B\n\022application_state" + "s\030\002 \003(\0162&.hadoop.yarn.YarnApplicationSta" + "teProto\022\r\n\005users\030\003 \003(\t\022\016\n\006queues\030\004 \003(\t\022\r" + "\n\005limit\030\005 \001(\003\022\023\n\013start_begin\030\006 \001(\003\022\021\n\tst" + "art_end\030\007 \001(\003\022\024\n\014finish_begin\030\010 \001(\003\022\022\n\nf" + "inish_end\030\t \001(\003\022\027\n\017applicationTags\030\n \003(\t" + "\022>\n\005scope\030\013 \001(\0162*.hadoop.yarn.Applicatio" + "nsRequestScopeProto:\003ALL\022\014\n\004name\030\014 \001(\t\"Y" + "\n\034GetApplicationsResponseProto\0229\n\014applic" + "ations\030\001 \003(\0132#.hadoop.yarn.ApplicationRe" + "portProto\"N\n\033GetClusterNodesRequestProto" + "\022/\n\nnodeStates\030\001 \003(\0162\033.hadoop.yarn.NodeS" + "tateProto\"Q\n\034GetClusterNodesResponseProt" + "o\0221\n\013nodeReports\030\001 \003(\0132\034.hadoop.yarn.Nod" + "eReportProto\"y\n\030GetQueueInfoRequestProto" + "\022\021\n\tqueueName\030\001 \001(\t\022\033\n\023includeApplicatio" + "ns\030\002 \001(\010\022\032\n\022includeChildQueues\030\003 \001(\010\022\021\n\t" + "recursive\030\004 \001(\010\"K\n\031GetQueueInfoResponseP" + "roto\022.\n\tqueueInfo\030\001 \001(\0132\033.hadoop.yarn.Qu" + "eueInfoProto\"\"\n GetQueueUserAclsInfoRequ" + 
"estProto\"^\n!GetQueueUserAclsInfoResponse" + "Proto\0229\n\rqueueUserAcls\030\001 \003(\0132\".hadoop.ya" + "rn.QueueUserACLInfoProto\"\036\n\034GetNodesToLa" + "belsRequestProto\"W\n\035GetNodesToLabelsResp" + "onseProto\0226\n\014nodeToLabels\030\001 \003(\0132 .hadoop" + ".yarn.NodeIdToLabelsProto\"2\n\034GetLabelsTo" + "NodesRequestProto\022\022\n\nnodeLabels\030\001 \003(\t\"Y\n" + "\035GetLabelsToNodesResponseProto\0228\n\rlabels" + "ToNodes\030\001 \003(\0132!.hadoop.yarn.LabelsToNode" + "IdsProto\"\"\n GetClusterNodeLabelsRequestP" + "roto\"r\n!GetClusterNodeLabelsResponseProt" + "o\022\034\n\024deprecatedNodeLabels\030\001 \003(\t\022/\n\nnodeL" + "abels\030\002 \003(\0132\033.hadoop.yarn.NodeLabelProto" + "\"&\n$GetClusterNodeAttributesRequestProto" + "\"d\n%GetClusterNodeAttributesResponseProt" + "o\022;\n\016nodeAttributes\030\001 \003(\0132#.hadoop.yarn." + "NodeAttributeInfoProto\"^\n GetAttributesT" + "oNodesRequestProto\022:\n\016nodeAttributes\030\001 \003" + "(\0132\".hadoop.yarn.NodeAttributeKeyProto\"b" + "\n!GetAttributesToNodesResponseProto\022=\n\021a" + "ttributesToNodes\030\001 \003(\0132\".hadoop.yarn.Att" + "ributeToNodesProto\"5\n GetNodesToAttribut" + "esRequestProto\022\021\n\thostnames\030\001 \003(\t\"b\n!Get" + "NodesToAttributesResponseProto\022=\n\021nodesT" + "oAttributes\030\001 \003(\0132\".hadoop.yarn.NodeToAt" + "tributesProto\"\230\001\n%UpdateApplicationPrior" + "ityRequestProto\0226\n\rapplicationId\030\001 \002(\0132\037" + ".hadoop.yarn.ApplicationIdProto\0227\n\023appli" + "cationPriority\030\002 \002(\0132\032.hadoop.yarn.Prior" + "ityProto\"a\n&UpdateApplicationPriorityRes" + "ponseProto\0227\n\023applicationPriority\030\001 \001(\0132" + "\032.hadoop.yarn.PriorityProto\"\215\001\n\033SignalCo" + "ntainerRequestProto\0223\n\014container_id\030\001 \002(" + "\0132\035.hadoop.yarn.ContainerIdProto\0229\n\007comm" + "and\030\002 \002(\0162(.hadoop.yarn.SignalContainerC" + 
"ommandProto\"\036\n\034SignalContainerResponsePr" + "oto\"\254\001\n%UpdateApplicationTimeoutsRequest" + "Proto\0226\n\rapplicationId\030\001 \002(\0132\037.hadoop.ya" + "rn.ApplicationIdProto\022K\n\024application_tim" + "eouts\030\002 \003(\0132-.hadoop.yarn.ApplicationUpd" + "ateTimeoutMapProto\"u\n&UpdateApplicationT" + "imeoutsResponseProto\022K\n\024application_time" + "outs\030\001 \003(\0132-.hadoop.yarn.ApplicationUpda" + "teTimeoutMapProto\"$\n\"GetAllResourceProfi" + "lesRequestProto\"d\n#GetAllResourceProfile" + "sResponseProto\022=\n\021resource_profiles\030\001 \002(" + "\0132\".hadoop.yarn.ResourceProfilesProto\"1\n" + "\036GetResourceProfileRequestProto\022\017\n\007profi" + "le\030\001 \002(\t\"P\n\037GetResourceProfileResponsePr" + "oto\022-\n\tresources\030\001 \002(\0132\032.hadoop.yarn.Res" + "ourceProto\"$\n\"GetAllResourceTypeInfoRequ" + "estProto\"e\n#GetAllResourceTypeInfoRespon" + "seProto\022>\n\022resource_type_info\030\001 \003(\0132\".ha" + "doop.yarn.ResourceTypeInfoProto\"\234\001\n\032Star" + "tContainerRequestProto\022J\n\030container_laun" + "ch_context\030\001 \001(\0132(.hadoop.yarn.Container" + "LaunchContextProto\0222\n\017container_token\030\002 " + "\001(\0132\031.hadoop.common.TokenProto\"[\n\033StartC" + "ontainerResponseProto\022<\n\022services_meta_d" + "ata\030\001 \003(\0132 .hadoop.yarn.StringBytesMapPr" + "oto\"P\n\031StopContainerRequestProto\0223\n\014cont" + "ainer_id\030\001 \001(\0132\035.hadoop.yarn.ContainerId" + "Proto\"\034\n\032StopContainerResponseProto\"\232\001\n " + "ResourceLocalizationRequestProto\0223\n\014cont" + "ainer_id\030\001 \001(\0132\035.hadoop.yarn.ContainerId" + "Proto\022A\n\017local_resources\030\002 \003(\0132(.hadoop." 
+ "yarn.StringLocalResourceMapProto\"#\n!Reso" + "urceLocalizationResponseProto\"\277\001\n!ReInit" + "ializeContainerRequestProto\0223\n\014container" + "_id\030\001 \001(\0132\035.hadoop.yarn.ContainerIdProto" + "\022J\n\030container_launch_context\030\002 \001(\0132(.had" + "oop.yarn.ContainerLaunchContextProto\022\031\n\013" + "auto_commit\030\003 \001(\010:\004true\"$\n\"ReInitializeC" + "ontainerResponseProto\"\037\n\035RestartContaine" + "rResponseProto\"\027\n\025RollbackResponseProto\"" + "\025\n\023CommitResponseProto\"g\n\033StartContainer" + "sRequestProto\022H\n\027start_container_request" + "\030\001 \003(\0132\'.hadoop.yarn.StartContainerReque" + "stProto\"\213\001\n\032ContainerExceptionMapProto\0223" + "\n\014container_id\030\001 \001(\0132\035.hadoop.yarn.Conta" + "inerIdProto\0228\n\texception\030\002 \001(\0132%.hadoop." + "yarn.SerializedExceptionProto\"\331\001\n\034StartC" + "ontainersResponseProto\022<\n\022services_meta_" + "data\030\001 \003(\0132 .hadoop.yarn.StringBytesMapP" + "roto\0229\n\022succeeded_requests\030\002 \003(\0132\035.hadoo" + "p.yarn.ContainerIdProto\022@\n\017failed_reques" + "ts\030\003 \003(\0132\'.hadoop.yarn.ContainerExceptio" + "nMapProto\"Q\n\032StopContainersRequestProto\022" + "3\n\014container_id\030\001 \003(\0132\035.hadoop.yarn.Cont" + "ainerIdProto\"\232\001\n\033StopContainersResponseP" + "roto\0229\n\022succeeded_requests\030\001 \003(\0132\035.hadoo" + "p.yarn.ContainerIdProto\022@\n\017failed_reques" + "ts\030\002 \003(\0132\'.hadoop.yarn.ContainerExceptio" + "nMapProto\"W\n GetContainerStatusesRequest" + "Proto\0223\n\014container_id\030\001 \003(\0132\035.hadoop.yar" + "n.ContainerIdProto\"\230\001\n!GetContainerStatu" + "sesResponseProto\0221\n\006status\030\001 \003(\0132!.hadoo" + "p.yarn.ContainerStatusProto\022@\n\017failed_re" + "quests\030\002 \003(\0132\'.hadoop.yarn.ContainerExce" + "ptionMapProto\"`\n&IncreaseContainersResou" + "rceRequestProto\0226\n\023increase_containers\030\001" + " 
\003(\0132\031.hadoop.common.TokenProto\"\246\001\n\'Incr" + "easeContainersResourceResponseProto\0229\n\022s" + "ucceeded_requests\030\001 \003(\0132\035.hadoop.yarn.Co" + "ntainerIdProto\022@\n\017failed_requests\030\002 \003(\0132" + "\'.hadoop.yarn.ContainerExceptionMapProto" + "\"X\n\033ContainerUpdateRequestProto\0229\n\026updat" + "e_container_token\030\001 \003(\0132\031.hadoop.common." + "TokenProto\"\233\001\n\034ContainerUpdateResponsePr" + "oto\0229\n\022succeeded_requests\030\001 \003(\0132\035.hadoop" + ".yarn.ContainerIdProto\022@\n\017failed_request" + "s\030\002 \003(\0132\'.hadoop.yarn.ContainerException" + "MapProto\"q\n\'GetApplicationAttemptReportR" + "equestProto\022F\n\026application_attempt_id\030\001 " + "\001(\0132&.hadoop.yarn.ApplicationAttemptIdPr" + "oto\"z\n(GetApplicationAttemptReportRespon" + "seProto\022N\n\032application_attempt_report\030\001 " + "\001(\0132*.hadoop.yarn.ApplicationAttemptRepo" + "rtProto\"]\n\"GetApplicationAttemptsRequest" + "Proto\0227\n\016application_id\030\001 \001(\0132\037.hadoop.y" + "arn.ApplicationIdProto\"o\n#GetApplication" + "AttemptsResponseProto\022H\n\024application_att" + "empts\030\001 \003(\0132*.hadoop.yarn.ApplicationAtt" + "emptReportProto\"U\n\036GetContainerReportReq" + "uestProto\0223\n\014container_id\030\001 \001(\0132\035.hadoop" + ".yarn.ContainerIdProto\"^\n\037GetContainerRe" + "portResponseProto\022;\n\020container_report\030\001 " + "\001(\0132!.hadoop.yarn.ContainerReportProto\"c" + "\n\031GetContainersRequestProto\022F\n\026applicati" + "on_attempt_id\030\001 \001(\0132&.hadoop.yarn.Applic" + "ationAttemptIdProto\"S\n\032GetContainersResp" + "onseProto\0225\n\ncontainers\030\001 \003(\0132!.hadoop.y" + "arn.ContainerReportProto\"q\n\"UseSharedCac" + "heResourceRequestProto\0226\n\rapplicationId\030" + "\001 \001(\0132\037.hadoop.yarn.ApplicationIdProto\022\023" + "\n\013resourceKey\030\002 \001(\t\"3\n#UseSharedCacheRes" + "ourceResponseProto\022\014\n\004path\030\001 
\001(\t\"u\n&Rele" + "aseSharedCacheResourceRequestProto\0226\n\rap" + "plicationId\030\001 \001(\0132\037.hadoop.yarn.Applicat" + "ionIdProto\022\023\n\013resourceKey\030\002 \001(\t\")\n\'Relea" + "seSharedCacheResourceResponseProto\"\037\n\035Ge" + "tNewReservationRequestProto\"Y\n\036GetNewRes" + "ervationResponseProto\0227\n\016reservation_id\030" + "\001 \001(\0132\037.hadoop.yarn.ReservationIdProto\"\264" + "\001\n!ReservationSubmissionRequestProto\022\r\n\005" + "queue\030\001 \001(\t\022G\n\026reservation_definition\030\002 " + "\001(\0132\'.hadoop.yarn.ReservationDefinitionP" + "roto\0227\n\016reservation_id\030\003 \001(\0132\037.hadoop.ya" + "rn.ReservationIdProto\"$\n\"ReservationSubm" + "issionResponseProto\"\241\001\n\035ReservationUpdat" + "eRequestProto\022G\n\026reservation_definition\030" + "\001 \001(\0132\'.hadoop.yarn.ReservationDefinitio" + "nProto\0227\n\016reservation_id\030\002 \001(\0132\037.hadoop." + "yarn.ReservationIdProto\" \n\036ReservationUp" + "dateResponseProto\"X\n\035ReservationDeleteRe" + "questProto\0227\n\016reservation_id\030\001 \001(\0132\037.had" + "oop.yarn.ReservationIdProto\" \n\036Reservati" + "onDeleteResponseProto\"\220\001\n\033ReservationLis" + "tRequestProto\022\r\n\005queue\030\001 \001(\t\022\026\n\016reservat" + "ion_id\030\003 \001(\t\022\022\n\nstart_time\030\004 \001(\003\022\020\n\010end_" + "time\030\005 \001(\003\022$\n\034include_resource_allocatio" + "ns\030\006 \001(\010\"b\n\034ReservationListResponseProto" + "\022B\n\014reservations\030\001 \003(\0132,.hadoop.yarn.Res" + "ervationAllocationStateProto\"\'\n%RunShare" + "dCacheCleanerTaskRequestProto\":\n&RunShar" + "edCacheCleanerTaskResponseProto\022\020\n\010accep" + "ted\030\001 \001(\010\"Z\n#GetLocalizationStatusesRequ" + "estProto\0223\n\014container_id\030\001 \003(\0132\035.hadoop." 
+ "yarn.ContainerIdProto\"\275\001\n$GetLocalizatio" + "nStatusesResponseProto\022S\n\032cntn_localizat" + "ion_statuses\030\001 \003(\0132/.hadoop.yarn.Contain" + "erLocalizationStatusesProto\022@\n\017failed_re" + "quests\030\002 \003(\0132\'.hadoop.yarn.ContainerExce" + "ptionMapProto\"\205\001\n\027LocalizationStatusProt" + "o\022\024\n\014resource_key\030\001 \001(\t\022?\n\022localization_" + "state\030\002 \001(\0162#.hadoop.yarn.LocalizationSt" + "ateProto\022\023\n\013diagnostics\030\003 \001(\t\"\236\001\n\"Contai" + "nerLocalizationStatusesProto\0223\n\014containe" + "r_id\030\001 \001(\0132\035.hadoop.yarn.ContainerIdProt" + "o\022C\n\025localization_statuses\030\002 \003(\0132$.hadoo" + "p.yarn.LocalizationStatusProto*\177\n\030Contai" + "nerUpdateTypeProto\022\025\n\021INCREASE_RESOURCE\020" + "\000\022\025\n\021DECREASE_RESOURCE\020\001\022\032\n\026PROMOTE_EXEC" + "UTION_TYPE\020\002\022\031\n\025DEMOTE_EXECUTION_TYPE\020\003*" + "-\n\026SchedulerResourceTypes\022\n\n\006MEMORY\020\000\022\007\n" + "\003CPU\020\001*?\n\035ApplicationsRequestScopeProto\022" + "\007\n\003ALL\020\000\022\014\n\010VIEWABLE\020\001\022\007\n\003OWN\020\002*F\n\026Local" + "izationStateProto\022\r\n\tL_PENDING\020\001\022\017\n\013L_CO" + "MPLETED\020\002\022\014\n\010L_FAILED\020\003B7\n\034org.apache.ha" + "doop.yarn.protoB\021YarnServiceProtos\210\001\001\240\001\001" }; org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { public org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry assignDescriptors( org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor root) { descriptor = root; return null; } }; org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.security.proto.SecurityProtos.getDescriptor(), org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor(), }, assigner); internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_RegisterApplicationMasterRequestProto_descriptor, new java.lang.String[] { "Host", "RpcPort", "TrackingUrl", "PlacementConstraints", }); internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_RegisterApplicationMasterResponseProto_descriptor, new java.lang.String[] { "MaximumCapability", "ClientToAmTokenMasterKey", "ApplicationACLs", "ContainersFromPreviousAttempts", "Queue", "NmTokensFromPreviousAttempts", "SchedulerResourceTypes", "ResourceProfiles", "ResourceTypes", }); internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_FinishApplicationMasterRequestProto_descriptor, new java.lang.String[] { 
"Diagnostics", "TrackingUrl", "FinalApplicationStatus", }); internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_FinishApplicationMasterResponseProto_descriptor, new java.lang.String[] { "IsUnregistered", }); internal_static_hadoop_yarn_UpdateContainerRequestProto_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_hadoop_yarn_UpdateContainerRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_UpdateContainerRequestProto_descriptor, new java.lang.String[] { "ContainerVersion", "ContainerId", "UpdateType", "Capability", "ExecutionType", }); internal_static_hadoop_yarn_UpdateContainerErrorProto_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_hadoop_yarn_UpdateContainerErrorProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_UpdateContainerErrorProto_descriptor, new java.lang.String[] { "Reason", "UpdateRequest", "CurrentContainerVersion", }); internal_static_hadoop_yarn_AllocateRequestProto_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_hadoop_yarn_AllocateRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_AllocateRequestProto_descriptor, new java.lang.String[] { "Ask", "Release", "BlacklistRequest", "ResponseId", "Progress", "UpdateRequests", "SchedulingRequests", "TrackingUrl", }); internal_static_hadoop_yarn_NMTokenProto_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_hadoop_yarn_NMTokenProto_fieldAccessorTable = new 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_NMTokenProto_descriptor, new java.lang.String[] { "NodeId", "Token", }); internal_static_hadoop_yarn_UpdatedContainerProto_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_hadoop_yarn_UpdatedContainerProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_UpdatedContainerProto_descriptor, new java.lang.String[] { "UpdateType", "Container", }); internal_static_hadoop_yarn_AllocateResponseProto_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_hadoop_yarn_AllocateResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_AllocateResponseProto_descriptor, new java.lang.String[] { "AMCommand", "ResponseId", "AllocatedContainers", "CompletedContainerStatuses", "Limit", "UpdatedNodes", "NumClusterNodes", "Preempt", "NmTokens", "AmRmToken", "ApplicationPriority", "CollectorInfo", "UpdateErrors", "UpdatedContainers", "ContainersFromPreviousAttempts", "RejectedSchedulingRequests", }); internal_static_hadoop_yarn_GetNewApplicationRequestProto_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_hadoop_yarn_GetNewApplicationRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetNewApplicationRequestProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_GetNewApplicationResponseProto_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_hadoop_yarn_GetNewApplicationResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetNewApplicationResponseProto_descriptor, new java.lang.String[] { "ApplicationId", "MaximumCapability", }); 
internal_static_hadoop_yarn_GetApplicationReportRequestProto_descriptor = getDescriptor().getMessageTypes().get(12); internal_static_hadoop_yarn_GetApplicationReportRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetApplicationReportRequestProto_descriptor, new java.lang.String[] { "ApplicationId", }); internal_static_hadoop_yarn_GetApplicationReportResponseProto_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_hadoop_yarn_GetApplicationReportResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetApplicationReportResponseProto_descriptor, new java.lang.String[] { "ApplicationReport", }); internal_static_hadoop_yarn_SubmitApplicationRequestProto_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_hadoop_yarn_SubmitApplicationRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_SubmitApplicationRequestProto_descriptor, new java.lang.String[] { "ApplicationSubmissionContext", }); internal_static_hadoop_yarn_SubmitApplicationResponseProto_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_hadoop_yarn_SubmitApplicationResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_SubmitApplicationResponseProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_descriptor = getDescriptor().getMessageTypes().get(16); internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_FailApplicationAttemptRequestProto_descriptor, new java.lang.String[] { "ApplicationAttemptId", }); 
internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_FailApplicationAttemptResponseProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_KillApplicationRequestProto_descriptor = getDescriptor().getMessageTypes().get(18); internal_static_hadoop_yarn_KillApplicationRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_KillApplicationRequestProto_descriptor, new java.lang.String[] { "ApplicationId", "Diagnostics", }); internal_static_hadoop_yarn_KillApplicationResponseProto_descriptor = getDescriptor().getMessageTypes().get(19); internal_static_hadoop_yarn_KillApplicationResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_KillApplicationResponseProto_descriptor, new java.lang.String[] { "IsKillCompleted", }); internal_static_hadoop_yarn_GetClusterMetricsRequestProto_descriptor = getDescriptor().getMessageTypes().get(20); internal_static_hadoop_yarn_GetClusterMetricsRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetClusterMetricsRequestProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_GetClusterMetricsResponseProto_descriptor = getDescriptor().getMessageTypes().get(21); internal_static_hadoop_yarn_GetClusterMetricsResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetClusterMetricsResponseProto_descriptor, new java.lang.String[] { "ClusterMetrics", }); 
internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_descriptor = getDescriptor().getMessageTypes().get(22); internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_MoveApplicationAcrossQueuesRequestProto_descriptor, new java.lang.String[] { "ApplicationId", "TargetQueue", }); internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_descriptor = getDescriptor().getMessageTypes().get(23); internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_MoveApplicationAcrossQueuesResponseProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_GetApplicationsRequestProto_descriptor = getDescriptor().getMessageTypes().get(24); internal_static_hadoop_yarn_GetApplicationsRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetApplicationsRequestProto_descriptor, new java.lang.String[] { "ApplicationTypes", "ApplicationStates", "Users", "Queues", "Limit", "StartBegin", "StartEnd", "FinishBegin", "FinishEnd", "ApplicationTags", "Scope", "Name", }); internal_static_hadoop_yarn_GetApplicationsResponseProto_descriptor = getDescriptor().getMessageTypes().get(25); internal_static_hadoop_yarn_GetApplicationsResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetApplicationsResponseProto_descriptor, new java.lang.String[] { "Applications", }); internal_static_hadoop_yarn_GetClusterNodesRequestProto_descriptor = getDescriptor().getMessageTypes().get(26); internal_static_hadoop_yarn_GetClusterNodesRequestProto_fieldAccessorTable = new 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetClusterNodesRequestProto_descriptor, new java.lang.String[] { "NodeStates", }); internal_static_hadoop_yarn_GetClusterNodesResponseProto_descriptor = getDescriptor().getMessageTypes().get(27); internal_static_hadoop_yarn_GetClusterNodesResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetClusterNodesResponseProto_descriptor, new java.lang.String[] { "NodeReports", }); internal_static_hadoop_yarn_GetQueueInfoRequestProto_descriptor = getDescriptor().getMessageTypes().get(28); internal_static_hadoop_yarn_GetQueueInfoRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetQueueInfoRequestProto_descriptor, new java.lang.String[] { "QueueName", "IncludeApplications", "IncludeChildQueues", "Recursive", }); internal_static_hadoop_yarn_GetQueueInfoResponseProto_descriptor = getDescriptor().getMessageTypes().get(29); internal_static_hadoop_yarn_GetQueueInfoResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetQueueInfoResponseProto_descriptor, new java.lang.String[] { "QueueInfo", }); internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_descriptor = getDescriptor().getMessageTypes().get(30); internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetQueueUserAclsInfoRequestProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_descriptor = getDescriptor().getMessageTypes().get(31); internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_fieldAccessorTable = new 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetQueueUserAclsInfoResponseProto_descriptor, new java.lang.String[] { "QueueUserAcls", }); internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_descriptor = getDescriptor().getMessageTypes().get(32); internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetNodesToLabelsRequestProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_descriptor = getDescriptor().getMessageTypes().get(33); internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetNodesToLabelsResponseProto_descriptor, new java.lang.String[] { "NodeToLabels", }); internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_descriptor = getDescriptor().getMessageTypes().get(34); internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetLabelsToNodesRequestProto_descriptor, new java.lang.String[] { "NodeLabels", }); internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_descriptor = getDescriptor().getMessageTypes().get(35); internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetLabelsToNodesResponseProto_descriptor, new java.lang.String[] { "LabelsToNodes", }); internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_descriptor = getDescriptor().getMessageTypes().get(36); internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_fieldAccessorTable = new 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetClusterNodeLabelsRequestProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_descriptor = getDescriptor().getMessageTypes().get(37); internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetClusterNodeLabelsResponseProto_descriptor, new java.lang.String[] { "DeprecatedNodeLabels", "NodeLabels", }); internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_descriptor = getDescriptor().getMessageTypes().get(38); internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetClusterNodeAttributesRequestProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_descriptor = getDescriptor().getMessageTypes().get(39); internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetClusterNodeAttributesResponseProto_descriptor, new java.lang.String[] { "NodeAttributes", }); internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_descriptor = getDescriptor().getMessageTypes().get(40); internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetAttributesToNodesRequestProto_descriptor, new java.lang.String[] { "NodeAttributes", }); internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_descriptor = getDescriptor().getMessageTypes().get(41); 
internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetAttributesToNodesResponseProto_descriptor, new java.lang.String[] { "AttributesToNodes", }); internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_descriptor = getDescriptor().getMessageTypes().get(42); internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetNodesToAttributesRequestProto_descriptor, new java.lang.String[] { "Hostnames", }); internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_descriptor = getDescriptor().getMessageTypes().get(43); internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetNodesToAttributesResponseProto_descriptor, new java.lang.String[] { "NodesToAttributes", }); internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_descriptor = getDescriptor().getMessageTypes().get(44); internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_UpdateApplicationPriorityRequestProto_descriptor, new java.lang.String[] { "ApplicationId", "ApplicationPriority", }); internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_descriptor = getDescriptor().getMessageTypes().get(45); internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_UpdateApplicationPriorityResponseProto_descriptor, new java.lang.String[] { "ApplicationPriority", }); 
internal_static_hadoop_yarn_SignalContainerRequestProto_descriptor = getDescriptor().getMessageTypes().get(46); internal_static_hadoop_yarn_SignalContainerRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_SignalContainerRequestProto_descriptor, new java.lang.String[] { "ContainerId", "Command", }); internal_static_hadoop_yarn_SignalContainerResponseProto_descriptor = getDescriptor().getMessageTypes().get(47); internal_static_hadoop_yarn_SignalContainerResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_SignalContainerResponseProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_descriptor = getDescriptor().getMessageTypes().get(48); internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_UpdateApplicationTimeoutsRequestProto_descriptor, new java.lang.String[] { "ApplicationId", "ApplicationTimeouts", }); internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_descriptor = getDescriptor().getMessageTypes().get(49); internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_UpdateApplicationTimeoutsResponseProto_descriptor, new java.lang.String[] { "ApplicationTimeouts", }); internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_descriptor = getDescriptor().getMessageTypes().get(50); internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetAllResourceProfilesRequestProto_descriptor, new 
java.lang.String[] { }); internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_descriptor = getDescriptor().getMessageTypes().get(51); internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetAllResourceProfilesResponseProto_descriptor, new java.lang.String[] { "ResourceProfiles", }); internal_static_hadoop_yarn_GetResourceProfileRequestProto_descriptor = getDescriptor().getMessageTypes().get(52); internal_static_hadoop_yarn_GetResourceProfileRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetResourceProfileRequestProto_descriptor, new java.lang.String[] { "Profile", }); internal_static_hadoop_yarn_GetResourceProfileResponseProto_descriptor = getDescriptor().getMessageTypes().get(53); internal_static_hadoop_yarn_GetResourceProfileResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetResourceProfileResponseProto_descriptor, new java.lang.String[] { "Resources", }); internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_descriptor = getDescriptor().getMessageTypes().get(54); internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetAllResourceTypeInfoRequestProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_descriptor = getDescriptor().getMessageTypes().get(55); internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetAllResourceTypeInfoResponseProto_descriptor, new java.lang.String[] { 
"ResourceTypeInfo", }); internal_static_hadoop_yarn_StartContainerRequestProto_descriptor = getDescriptor().getMessageTypes().get(56); internal_static_hadoop_yarn_StartContainerRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_StartContainerRequestProto_descriptor, new java.lang.String[] { "ContainerLaunchContext", "ContainerToken", }); internal_static_hadoop_yarn_StartContainerResponseProto_descriptor = getDescriptor().getMessageTypes().get(57); internal_static_hadoop_yarn_StartContainerResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_StartContainerResponseProto_descriptor, new java.lang.String[] { "ServicesMetaData", }); internal_static_hadoop_yarn_StopContainerRequestProto_descriptor = getDescriptor().getMessageTypes().get(58); internal_static_hadoop_yarn_StopContainerRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_StopContainerRequestProto_descriptor, new java.lang.String[] { "ContainerId", }); internal_static_hadoop_yarn_StopContainerResponseProto_descriptor = getDescriptor().getMessageTypes().get(59); internal_static_hadoop_yarn_StopContainerResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_StopContainerResponseProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_ResourceLocalizationRequestProto_descriptor = getDescriptor().getMessageTypes().get(60); internal_static_hadoop_yarn_ResourceLocalizationRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ResourceLocalizationRequestProto_descriptor, new java.lang.String[] { "ContainerId", "LocalResources", }); 
internal_static_hadoop_yarn_ResourceLocalizationResponseProto_descriptor = getDescriptor().getMessageTypes().get(61); internal_static_hadoop_yarn_ResourceLocalizationResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ResourceLocalizationResponseProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_ReInitializeContainerRequestProto_descriptor = getDescriptor().getMessageTypes().get(62); internal_static_hadoop_yarn_ReInitializeContainerRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ReInitializeContainerRequestProto_descriptor, new java.lang.String[] { "ContainerId", "ContainerLaunchContext", "AutoCommit", }); internal_static_hadoop_yarn_ReInitializeContainerResponseProto_descriptor = getDescriptor().getMessageTypes().get(63); internal_static_hadoop_yarn_ReInitializeContainerResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ReInitializeContainerResponseProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_RestartContainerResponseProto_descriptor = getDescriptor().getMessageTypes().get(64); internal_static_hadoop_yarn_RestartContainerResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_RestartContainerResponseProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_RollbackResponseProto_descriptor = getDescriptor().getMessageTypes().get(65); internal_static_hadoop_yarn_RollbackResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_RollbackResponseProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_CommitResponseProto_descriptor = 
getDescriptor().getMessageTypes().get(66); internal_static_hadoop_yarn_CommitResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_CommitResponseProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_StartContainersRequestProto_descriptor = getDescriptor().getMessageTypes().get(67); internal_static_hadoop_yarn_StartContainersRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_StartContainersRequestProto_descriptor, new java.lang.String[] { "StartContainerRequest", }); internal_static_hadoop_yarn_ContainerExceptionMapProto_descriptor = getDescriptor().getMessageTypes().get(68); internal_static_hadoop_yarn_ContainerExceptionMapProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ContainerExceptionMapProto_descriptor, new java.lang.String[] { "ContainerId", "Exception", }); internal_static_hadoop_yarn_StartContainersResponseProto_descriptor = getDescriptor().getMessageTypes().get(69); internal_static_hadoop_yarn_StartContainersResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_StartContainersResponseProto_descriptor, new java.lang.String[] { "ServicesMetaData", "SucceededRequests", "FailedRequests", }); internal_static_hadoop_yarn_StopContainersRequestProto_descriptor = getDescriptor().getMessageTypes().get(70); internal_static_hadoop_yarn_StopContainersRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_StopContainersRequestProto_descriptor, new java.lang.String[] { "ContainerId", }); internal_static_hadoop_yarn_StopContainersResponseProto_descriptor = getDescriptor().getMessageTypes().get(71); 
internal_static_hadoop_yarn_StopContainersResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_StopContainersResponseProto_descriptor, new java.lang.String[] { "SucceededRequests", "FailedRequests", }); internal_static_hadoop_yarn_GetContainerStatusesRequestProto_descriptor = getDescriptor().getMessageTypes().get(72); internal_static_hadoop_yarn_GetContainerStatusesRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetContainerStatusesRequestProto_descriptor, new java.lang.String[] { "ContainerId", }); internal_static_hadoop_yarn_GetContainerStatusesResponseProto_descriptor = getDescriptor().getMessageTypes().get(73); internal_static_hadoop_yarn_GetContainerStatusesResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetContainerStatusesResponseProto_descriptor, new java.lang.String[] { "Status", "FailedRequests", }); internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_descriptor = getDescriptor().getMessageTypes().get(74); internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_IncreaseContainersResourceRequestProto_descriptor, new java.lang.String[] { "IncreaseContainers", }); internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_descriptor = getDescriptor().getMessageTypes().get(75); internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_IncreaseContainersResourceResponseProto_descriptor, new java.lang.String[] { "SucceededRequests", "FailedRequests", }); 
internal_static_hadoop_yarn_ContainerUpdateRequestProto_descriptor = getDescriptor().getMessageTypes().get(76); internal_static_hadoop_yarn_ContainerUpdateRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ContainerUpdateRequestProto_descriptor, new java.lang.String[] { "UpdateContainerToken", }); internal_static_hadoop_yarn_ContainerUpdateResponseProto_descriptor = getDescriptor().getMessageTypes().get(77); internal_static_hadoop_yarn_ContainerUpdateResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ContainerUpdateResponseProto_descriptor, new java.lang.String[] { "SucceededRequests", "FailedRequests", }); internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_descriptor = getDescriptor().getMessageTypes().get(78); internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetApplicationAttemptReportRequestProto_descriptor, new java.lang.String[] { "ApplicationAttemptId", }); internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_descriptor = getDescriptor().getMessageTypes().get(79); internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetApplicationAttemptReportResponseProto_descriptor, new java.lang.String[] { "ApplicationAttemptReport", }); internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_descriptor = getDescriptor().getMessageTypes().get(80); internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_hadoop_yarn_GetApplicationAttemptsRequestProto_descriptor, new java.lang.String[] { "ApplicationId", }); internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_descriptor = getDescriptor().getMessageTypes().get(81); internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetApplicationAttemptsResponseProto_descriptor, new java.lang.String[] { "ApplicationAttempts", }); internal_static_hadoop_yarn_GetContainerReportRequestProto_descriptor = getDescriptor().getMessageTypes().get(82); internal_static_hadoop_yarn_GetContainerReportRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetContainerReportRequestProto_descriptor, new java.lang.String[] { "ContainerId", }); internal_static_hadoop_yarn_GetContainerReportResponseProto_descriptor = getDescriptor().getMessageTypes().get(83); internal_static_hadoop_yarn_GetContainerReportResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetContainerReportResponseProto_descriptor, new java.lang.String[] { "ContainerReport", }); internal_static_hadoop_yarn_GetContainersRequestProto_descriptor = getDescriptor().getMessageTypes().get(84); internal_static_hadoop_yarn_GetContainersRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetContainersRequestProto_descriptor, new java.lang.String[] { "ApplicationAttemptId", }); internal_static_hadoop_yarn_GetContainersResponseProto_descriptor = getDescriptor().getMessageTypes().get(85); internal_static_hadoop_yarn_GetContainersResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_hadoop_yarn_GetContainersResponseProto_descriptor, new java.lang.String[] { "Containers", }); internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_descriptor = getDescriptor().getMessageTypes().get(86); internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_UseSharedCacheResourceRequestProto_descriptor, new java.lang.String[] { "ApplicationId", "ResourceKey", }); internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_descriptor = getDescriptor().getMessageTypes().get(87); internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_UseSharedCacheResourceResponseProto_descriptor, new java.lang.String[] { "Path", }); internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_descriptor = getDescriptor().getMessageTypes().get(88); internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ReleaseSharedCacheResourceRequestProto_descriptor, new java.lang.String[] { "ApplicationId", "ResourceKey", }); internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_descriptor = getDescriptor().getMessageTypes().get(89); internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ReleaseSharedCacheResourceResponseProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_GetNewReservationRequestProto_descriptor = getDescriptor().getMessageTypes().get(90); internal_static_hadoop_yarn_GetNewReservationRequestProto_fieldAccessorTable = new 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetNewReservationRequestProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_GetNewReservationResponseProto_descriptor = getDescriptor().getMessageTypes().get(91); internal_static_hadoop_yarn_GetNewReservationResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetNewReservationResponseProto_descriptor, new java.lang.String[] { "ReservationId", }); internal_static_hadoop_yarn_ReservationSubmissionRequestProto_descriptor = getDescriptor().getMessageTypes().get(92); internal_static_hadoop_yarn_ReservationSubmissionRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ReservationSubmissionRequestProto_descriptor, new java.lang.String[] { "Queue", "ReservationDefinition", "ReservationId", }); internal_static_hadoop_yarn_ReservationSubmissionResponseProto_descriptor = getDescriptor().getMessageTypes().get(93); internal_static_hadoop_yarn_ReservationSubmissionResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ReservationSubmissionResponseProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_ReservationUpdateRequestProto_descriptor = getDescriptor().getMessageTypes().get(94); internal_static_hadoop_yarn_ReservationUpdateRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ReservationUpdateRequestProto_descriptor, new java.lang.String[] { "ReservationDefinition", "ReservationId", }); internal_static_hadoop_yarn_ReservationUpdateResponseProto_descriptor = getDescriptor().getMessageTypes().get(95); 
internal_static_hadoop_yarn_ReservationUpdateResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ReservationUpdateResponseProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_ReservationDeleteRequestProto_descriptor = getDescriptor().getMessageTypes().get(96); internal_static_hadoop_yarn_ReservationDeleteRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ReservationDeleteRequestProto_descriptor, new java.lang.String[] { "ReservationId", }); internal_static_hadoop_yarn_ReservationDeleteResponseProto_descriptor = getDescriptor().getMessageTypes().get(97); internal_static_hadoop_yarn_ReservationDeleteResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ReservationDeleteResponseProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_ReservationListRequestProto_descriptor = getDescriptor().getMessageTypes().get(98); internal_static_hadoop_yarn_ReservationListRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ReservationListRequestProto_descriptor, new java.lang.String[] { "Queue", "ReservationId", "StartTime", "EndTime", "IncludeResourceAllocations", }); internal_static_hadoop_yarn_ReservationListResponseProto_descriptor = getDescriptor().getMessageTypes().get(99); internal_static_hadoop_yarn_ReservationListResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ReservationListResponseProto_descriptor, new java.lang.String[] { "Reservations", }); internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_descriptor = getDescriptor().getMessageTypes().get(100); 
internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_RunSharedCacheCleanerTaskRequestProto_descriptor, new java.lang.String[] { }); internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_descriptor = getDescriptor().getMessageTypes().get(101); internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_RunSharedCacheCleanerTaskResponseProto_descriptor, new java.lang.String[] { "Accepted", }); internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_descriptor = getDescriptor().getMessageTypes().get(102); internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetLocalizationStatusesRequestProto_descriptor, new java.lang.String[] { "ContainerId", }); internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_descriptor = getDescriptor().getMessageTypes().get(103); internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_GetLocalizationStatusesResponseProto_descriptor, new java.lang.String[] { "CntnLocalizationStatuses", "FailedRequests", }); internal_static_hadoop_yarn_LocalizationStatusProto_descriptor = getDescriptor().getMessageTypes().get(104); internal_static_hadoop_yarn_LocalizationStatusProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_LocalizationStatusProto_descriptor, new java.lang.String[] { "ResourceKey", "LocalizationState", "Diagnostics", }); 
internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_descriptor = getDescriptor().getMessageTypes().get(105); internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ContainerLocalizationStatusesProto_descriptor, new java.lang.String[] { "ContainerId", "LocalizationStatuses", }); org.apache.hadoop.security.proto.SecurityProtos.getDescriptor(); org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }




© 2015 - 2024 Weber Informatics LLC | Privacy Policy