
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: yarn_protos.proto

package org.apache.hadoop.yarn.proto;

public final class YarnProtos {
  private YarnProtos() {}
  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
  }
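
  // Illustrative sketch (not part of the generated source): a caller that
  // parses serialized YARN messages carrying protobuf extensions would
  // typically register the extensions first. `bytes` is a hypothetical
  // serialized hadoop.yarn message; ContainerProto stands in for any message
  // type defined later in this file.
  //
  //   org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry =
  //       org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry.newInstance();
  //   YarnProtos.registerAllExtensions(registry);
  //   YarnProtos.ContainerProto container =
  //       YarnProtos.ContainerProto.parseFrom(bytes, registry);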
  /**
   * Protobuf enum {@code hadoop.yarn.ResourceTypesProto}
   */
  public enum ResourceTypesProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * COUNTABLE = 0;
     */
    COUNTABLE(0),
    ;

    /**
     * COUNTABLE = 0;
     */
    public static final int COUNTABLE_VALUE = 0;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ResourceTypesProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static ResourceTypesProto forNumber(int value) {
      switch (value) {
        case 0: return COUNTABLE;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ResourceTypesProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        ResourceTypesProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ResourceTypesProto>() {
            public ResourceTypesProto findValueByNumber(int number) {
              return ResourceTypesProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(0);
    }

    private static final ResourceTypesProto[] VALUES = values();

    public static ResourceTypesProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ResourceTypesProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ResourceTypesProto)
  }
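
  // Illustrative sketch (not part of the generated code): the numeric wire
  // value round-trips through forNumber/getNumber, and forNumber returns null
  // for numbers the enum does not define.
  //
  //   YarnProtos.ResourceTypesProto t = YarnProtos.ResourceTypesProto.forNumber(0);
  //   assert t == YarnProtos.ResourceTypesProto.COUNTABLE;
  //   assert t.getNumber() == YarnProtos.ResourceTypesProto.COUNTABLE_VALUE;
  //   assert YarnProtos.ResourceTypesProto.forNumber(42) == null;  // unknown value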

  /**
   * Protobuf enum {@code hadoop.yarn.ContainerStateProto}
   */
  public enum ContainerStateProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * C_NEW = 1;
     */
    C_NEW(1),
    /**
     * C_RUNNING = 2;
     */
    C_RUNNING(2),
    /**
     * C_COMPLETE = 3;
     */
    C_COMPLETE(3),
    ;

    /**
     * C_NEW = 1;
     */
    public static final int C_NEW_VALUE = 1;
    /**
     * C_RUNNING = 2;
     */
    public static final int C_RUNNING_VALUE = 2;
    /**
     * C_COMPLETE = 3;
     */
    public static final int C_COMPLETE_VALUE = 3;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ContainerStateProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static ContainerStateProto forNumber(int value) {
      switch (value) {
        case 1: return C_NEW;
        case 2: return C_RUNNING;
        case 3: return C_COMPLETE;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerStateProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        ContainerStateProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerStateProto>() {
            public ContainerStateProto findValueByNumber(int number) {
              return ContainerStateProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(1);
    }

    private static final ContainerStateProto[] VALUES = values();

    public static ContainerStateProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ContainerStateProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ContainerStateProto)
  }
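
  // Illustrative sketch (not part of the generated code): descriptor-based
  // lookup, e.g. for generic reflection over enum fields. valueOf(desc) throws
  // IllegalArgumentException if the descriptor belongs to a different enum type.
  //
  //   org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor d =
  //       YarnProtos.ContainerStateProto.C_RUNNING.getValueDescriptor();
  //   YarnProtos.ContainerStateProto s = YarnProtos.ContainerStateProto.valueOf(d);
  //   assert s == YarnProtos.ContainerStateProto.C_RUNNING;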

  /**
   * Protobuf enum {@code hadoop.yarn.ContainerSubStateProto}
   */
  public enum ContainerSubStateProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * NEW, LOCALIZING, SCHEDULED,
     * REINITIALIZING_AWAITING_KILL, RELAUNCHING,
     *
     * CSS_SCHEDULED = 1;
     */
    CSS_SCHEDULED(1),
    /**
     * RUNNING, REINITIALIZING, PAUSING, KILLING
     *
     * CSS_RUNNING = 2;
     */
    CSS_RUNNING(2),
    /**
     * PAUSED, RESUMING
     *
     * CSS_PAUSED = 3;
     */
    CSS_PAUSED(3),
    /**
     * LOCALIZATION_FAILED, EXITED_WITH_SUCCESS,
     * EXITED_WITH_FAILURE,
     * CONTAINER_CLEANEDUP_AFTER_KILL,
     * CONTAINER_RESOURCES_CLEANINGUP
     *
     * CSS_COMPLETING = 4;
     */
    CSS_COMPLETING(4),
    /**
     * DONE
     *
     * CSS_DONE = 5;
     */
    CSS_DONE(5),
    ;

    /** CSS_SCHEDULED = 1; */
    public static final int CSS_SCHEDULED_VALUE = 1;
    /** CSS_RUNNING = 2; */
    public static final int CSS_RUNNING_VALUE = 2;
    /** CSS_PAUSED = 3; */
    public static final int CSS_PAUSED_VALUE = 3;
    /** CSS_COMPLETING = 4; */
    public static final int CSS_COMPLETING_VALUE = 4;
    /** CSS_DONE = 5; */
    public static final int CSS_DONE_VALUE = 5;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static ContainerSubStateProto valueOf(int value) { return forNumber(value); }

    public static ContainerSubStateProto forNumber(int value) {
      switch (value) {
        case 1: return CSS_SCHEDULED;
        case 2: return CSS_RUNNING;
        case 3: return CSS_PAUSED;
        case 4: return CSS_COMPLETING;
        case 5: return CSS_DONE;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerSubStateProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerSubStateProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerSubStateProto>() {
          public ContainerSubStateProto findValueByNumber(int number) { return ContainerSubStateProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(2);
    }

    private static final ContainerSubStateProto[] VALUES = values();

    public static ContainerSubStateProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ContainerSubStateProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ContainerSubStateProto)
  }

  /** Protobuf enum {@code hadoop.yarn.YarnApplicationStateProto} */
  public enum YarnApplicationStateProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** NEW = 1; */
    NEW(1),
    /** NEW_SAVING = 2; */
    NEW_SAVING(2),
    /** SUBMITTED = 3; */
    SUBMITTED(3),
    /** ACCEPTED = 4; */
    ACCEPTED(4),
    /** RUNNING = 5; */
    RUNNING(5),
    /** FINISHED = 6; */
    FINISHED(6),
    /** FAILED = 7; */
    FAILED(7),
    /** KILLED = 8; */
    KILLED(8),
    ;

    /** NEW = 1; */
    public static final int NEW_VALUE = 1;
    /** NEW_SAVING = 2; */
    public static final int NEW_SAVING_VALUE = 2;
    /** SUBMITTED = 3; */
    public static final int SUBMITTED_VALUE = 3;
    /** ACCEPTED = 4; */
    public static final int ACCEPTED_VALUE = 4;
    /** RUNNING = 5; */
    public static final int RUNNING_VALUE = 5;
    /** FINISHED = 6; */
    public static final int FINISHED_VALUE = 6;
    /** FAILED = 7; */
    public static final int FAILED_VALUE = 7;
    /** KILLED = 8; */
    public static final int KILLED_VALUE = 8;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static YarnApplicationStateProto valueOf(int value) { return forNumber(value); }

    public static YarnApplicationStateProto forNumber(int value) {
      switch (value) {
        case 1: return NEW;
        case 2: return NEW_SAVING;
        case 3: return SUBMITTED;
        case 4: return ACCEPTED;
        case 5: return RUNNING;
        case 6: return FINISHED;
        case 7: return FAILED;
        case 8: return KILLED;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<YarnApplicationStateProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<YarnApplicationStateProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<YarnApplicationStateProto>() {
          public YarnApplicationStateProto findValueByNumber(int number) { return YarnApplicationStateProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(3);
    }

    private static final YarnApplicationStateProto[] VALUES = values();

    public static YarnApplicationStateProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private YarnApplicationStateProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.YarnApplicationStateProto)
  }

  /** Protobuf enum {@code hadoop.yarn.YarnApplicationAttemptStateProto} */
  public enum YarnApplicationAttemptStateProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** APP_ATTEMPT_NEW = 1; */
    APP_ATTEMPT_NEW(1),
    /** APP_ATTEMPT_SUBMITTED = 2; */
    APP_ATTEMPT_SUBMITTED(2),
    /** APP_ATTEMPT_SCHEDULED = 3; */
    APP_ATTEMPT_SCHEDULED(3),
    /** APP_ATTEMPT_ALLOCATED_SAVING = 4; */
    APP_ATTEMPT_ALLOCATED_SAVING(4),
    /** APP_ATTEMPT_ALLOCATED = 5; */
    APP_ATTEMPT_ALLOCATED(5),
    /** APP_ATTEMPT_LAUNCHED = 6; */
    APP_ATTEMPT_LAUNCHED(6),
    /** APP_ATTEMPT_FAILED = 7; */
    APP_ATTEMPT_FAILED(7),
    /** APP_ATTEMPT_RUNNING = 8; */
    APP_ATTEMPT_RUNNING(8),
    /** APP_ATTEMPT_FINISHING = 9; */
    APP_ATTEMPT_FINISHING(9),
    /** APP_ATTEMPT_FINISHED = 10; */
    APP_ATTEMPT_FINISHED(10),
    /** APP_ATTEMPT_KILLED = 11; */
    APP_ATTEMPT_KILLED(11),
    ;

    /** APP_ATTEMPT_NEW = 1; */
    public static final int APP_ATTEMPT_NEW_VALUE = 1;
    /** APP_ATTEMPT_SUBMITTED = 2; */
    public static final int APP_ATTEMPT_SUBMITTED_VALUE = 2;
    /** APP_ATTEMPT_SCHEDULED = 3; */
    public static final int APP_ATTEMPT_SCHEDULED_VALUE = 3;
    /** APP_ATTEMPT_ALLOCATED_SAVING = 4; */
    public static final int APP_ATTEMPT_ALLOCATED_SAVING_VALUE = 4;
    /** APP_ATTEMPT_ALLOCATED = 5; */
    public static final int APP_ATTEMPT_ALLOCATED_VALUE = 5;
    /** APP_ATTEMPT_LAUNCHED = 6; */
    public static final int APP_ATTEMPT_LAUNCHED_VALUE = 6;
    /** APP_ATTEMPT_FAILED = 7; */
    public static final int APP_ATTEMPT_FAILED_VALUE = 7;
    /** APP_ATTEMPT_RUNNING = 8; */
    public static final int APP_ATTEMPT_RUNNING_VALUE = 8;
    /** APP_ATTEMPT_FINISHING = 9; */
    public static final int APP_ATTEMPT_FINISHING_VALUE = 9;
    /** APP_ATTEMPT_FINISHED = 10; */
    public static final int APP_ATTEMPT_FINISHED_VALUE = 10;
    /** APP_ATTEMPT_KILLED = 11; */
    public static final int APP_ATTEMPT_KILLED_VALUE = 11;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static YarnApplicationAttemptStateProto valueOf(int value) { return forNumber(value); }

    public static YarnApplicationAttemptStateProto forNumber(int value) {
      switch (value) {
        case 1: return APP_ATTEMPT_NEW;
        case 2: return APP_ATTEMPT_SUBMITTED;
        case 3: return APP_ATTEMPT_SCHEDULED;
        case 4: return APP_ATTEMPT_ALLOCATED_SAVING;
        case 5: return APP_ATTEMPT_ALLOCATED;
        case 6: return APP_ATTEMPT_LAUNCHED;
        case 7: return APP_ATTEMPT_FAILED;
        case 8: return APP_ATTEMPT_RUNNING;
        case 9: return APP_ATTEMPT_FINISHING;
        case 10: return APP_ATTEMPT_FINISHED;
        case 11: return APP_ATTEMPT_KILLED;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<YarnApplicationAttemptStateProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<YarnApplicationAttemptStateProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<YarnApplicationAttemptStateProto>() {
          public YarnApplicationAttemptStateProto findValueByNumber(int number) { return YarnApplicationAttemptStateProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(4);
    }

    private static final YarnApplicationAttemptStateProto[] VALUES = values();

    public static YarnApplicationAttemptStateProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private YarnApplicationAttemptStateProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.YarnApplicationAttemptStateProto)
  }

  /** Protobuf enum {@code hadoop.yarn.FinalApplicationStatusProto} */
  public enum FinalApplicationStatusProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** APP_UNDEFINED = 0; */
    APP_UNDEFINED(0),
    /** APP_SUCCEEDED = 1; */
    APP_SUCCEEDED(1),
    /** APP_FAILED = 2; */
    APP_FAILED(2),
    /** APP_KILLED = 3; */
    APP_KILLED(3),
    /** APP_ENDED = 4; */
    APP_ENDED(4),
    ;

    /** APP_UNDEFINED = 0; */
    public static final int APP_UNDEFINED_VALUE = 0;
    /** APP_SUCCEEDED = 1; */
    public static final int APP_SUCCEEDED_VALUE = 1;
    /** APP_FAILED = 2; */
    public static final int APP_FAILED_VALUE = 2;
    /** APP_KILLED = 3; */
    public static final int APP_KILLED_VALUE = 3;
    /** APP_ENDED = 4; */
    public static final int APP_ENDED_VALUE = 4;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static FinalApplicationStatusProto valueOf(int value) { return forNumber(value); }

    public static FinalApplicationStatusProto forNumber(int value) {
      switch (value) {
        case 0: return APP_UNDEFINED;
        case 1: return APP_SUCCEEDED;
        case 2: return APP_FAILED;
        case 3: return APP_KILLED;
        case 4: return APP_ENDED;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<FinalApplicationStatusProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<FinalApplicationStatusProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<FinalApplicationStatusProto>() {
          public FinalApplicationStatusProto findValueByNumber(int number) { return FinalApplicationStatusProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(5);
    }

    private static final FinalApplicationStatusProto[] VALUES = values();

    public static FinalApplicationStatusProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private FinalApplicationStatusProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.FinalApplicationStatusProto)
  }

  /** Protobuf enum {@code hadoop.yarn.LocalResourceVisibilityProto} */
  public enum LocalResourceVisibilityProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** PUBLIC = 1; */
    PUBLIC(1),
    /** PRIVATE = 2; */
    PRIVATE(2),
    /** APPLICATION = 3; */
    APPLICATION(3),
    ;

    /** PUBLIC = 1; */
    public static final int PUBLIC_VALUE = 1;
    /** PRIVATE = 2; */
    public static final int PRIVATE_VALUE = 2;
    /** APPLICATION = 3; */
    public static final int APPLICATION_VALUE = 3;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static LocalResourceVisibilityProto valueOf(int value) { return forNumber(value); }

    public static LocalResourceVisibilityProto forNumber(int value) {
      switch (value) {
        case 1: return PUBLIC;
        case 2: return PRIVATE;
        case 3: return APPLICATION;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LocalResourceVisibilityProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LocalResourceVisibilityProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LocalResourceVisibilityProto>() {
          public LocalResourceVisibilityProto findValueByNumber(int number) { return LocalResourceVisibilityProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(6);
    }

    private static final LocalResourceVisibilityProto[] VALUES = values();

    public static LocalResourceVisibilityProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private LocalResourceVisibilityProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.LocalResourceVisibilityProto)
  }

  /** Protobuf enum {@code hadoop.yarn.LocalResourceTypeProto} */
  public enum LocalResourceTypeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** ARCHIVE = 1; */
    ARCHIVE(1),
    /** FILE = 2; */
    FILE(2),
    /** PATTERN = 3; */
    PATTERN(3),
    ;

    /** ARCHIVE = 1; */
    public static final int ARCHIVE_VALUE = 1;
    /** FILE = 2; */
    public static final int FILE_VALUE = 2;
    /** PATTERN = 3; */
    public static final int PATTERN_VALUE = 3;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static LocalResourceTypeProto valueOf(int value) { return forNumber(value); }

    public static LocalResourceTypeProto forNumber(int value) {
      switch (value) {
        case 1: return ARCHIVE;
        case 2: return FILE;
        case 3: return PATTERN;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LocalResourceTypeProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LocalResourceTypeProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LocalResourceTypeProto>() {
          public LocalResourceTypeProto findValueByNumber(int number) { return LocalResourceTypeProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(7);
    }

    private static final LocalResourceTypeProto[] VALUES = values();

    public static LocalResourceTypeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private LocalResourceTypeProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.LocalResourceTypeProto)
  }

  /** Protobuf enum {@code hadoop.yarn.LogAggregationStatusProto} */
  public enum LogAggregationStatusProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** LOG_DISABLED = 1; */
    LOG_DISABLED(1),
    /** LOG_NOT_START = 2; */
    LOG_NOT_START(2),
    /** LOG_RUNNING = 3; */
    LOG_RUNNING(3),
    /** LOG_SUCCEEDED = 4; */
    LOG_SUCCEEDED(4),
    /** LOG_FAILED = 5; */
    LOG_FAILED(5),
    /** LOG_TIME_OUT = 6; */
    LOG_TIME_OUT(6),
    /** LOG_RUNNING_WITH_FAILURE = 7; */
    LOG_RUNNING_WITH_FAILURE(7),
    ;

    /** LOG_DISABLED = 1; */
    public static final int LOG_DISABLED_VALUE = 1;
    /** LOG_NOT_START = 2; */
    public static final int LOG_NOT_START_VALUE = 2;
    /** LOG_RUNNING = 3; */
    public static final int LOG_RUNNING_VALUE = 3;
    /** LOG_SUCCEEDED = 4; */
    public static final int LOG_SUCCEEDED_VALUE = 4;
    /** LOG_FAILED = 5; */
    public static final int LOG_FAILED_VALUE = 5;
    /** LOG_TIME_OUT = 6; */
    public static final int LOG_TIME_OUT_VALUE = 6;
    /** LOG_RUNNING_WITH_FAILURE = 7; */
    public static final int LOG_RUNNING_WITH_FAILURE_VALUE = 7;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static LogAggregationStatusProto valueOf(int value) { return forNumber(value); }

    public static LogAggregationStatusProto forNumber(int value) {
      switch (value) {
        case 1: return LOG_DISABLED;
        case 2: return LOG_NOT_START;
        case 3: return LOG_RUNNING;
        case 4: return LOG_SUCCEEDED;
        case 5: return LOG_FAILED;
        case 6: return LOG_TIME_OUT;
        case 7: return LOG_RUNNING_WITH_FAILURE;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LogAggregationStatusProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LogAggregationStatusProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LogAggregationStatusProto>() {
          public LogAggregationStatusProto findValueByNumber(int number) { return LogAggregationStatusProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(8);
    }

    private static final LogAggregationStatusProto[] VALUES = values();

    public static LogAggregationStatusProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private LogAggregationStatusProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.LogAggregationStatusProto)
  }

  /** Protobuf enum {@code hadoop.yarn.NodeStateProto} */
  public enum NodeStateProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** NS_NEW = 1; */
    NS_NEW(1),
    /** NS_RUNNING = 2; */
    NS_RUNNING(2),
    /** NS_UNHEALTHY = 3; */
    NS_UNHEALTHY(3),
    /** NS_DECOMMISSIONED = 4; */
    NS_DECOMMISSIONED(4),
    /** NS_LOST = 5; */
    NS_LOST(5),
    /** NS_REBOOTED = 6; */
    NS_REBOOTED(6),
    /** NS_DECOMMISSIONING = 7; */
    NS_DECOMMISSIONING(7),
    /** NS_SHUTDOWN = 8; */
    NS_SHUTDOWN(8),
    ;

    /** NS_NEW = 1; */
    public static final int NS_NEW_VALUE = 1;
    /** NS_RUNNING = 2; */
    public static final int NS_RUNNING_VALUE = 2;
    /** NS_UNHEALTHY = 3; */
    public static final int NS_UNHEALTHY_VALUE = 3;
    /** NS_DECOMMISSIONED = 4; */
    public static final int NS_DECOMMISSIONED_VALUE = 4;
    /** NS_LOST = 5; */
    public static final int NS_LOST_VALUE = 5;
    /** NS_REBOOTED = 6; */
    public static final int NS_REBOOTED_VALUE = 6;
    /** NS_DECOMMISSIONING = 7; */
    public static final int NS_DECOMMISSIONING_VALUE = 7;
    /** NS_SHUTDOWN = 8; */
    public static final int NS_SHUTDOWN_VALUE = 8;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static NodeStateProto valueOf(int value) { return forNumber(value); }

    public static NodeStateProto forNumber(int value) {
      switch (value) {
        case 1: return NS_NEW;
        case 2: return NS_RUNNING;
        case 3: return NS_UNHEALTHY;
        case 4: return NS_DECOMMISSIONED;
        case 5: return NS_LOST;
        case 6: return NS_REBOOTED;
        case 7: return NS_DECOMMISSIONING;
        case 8: return NS_SHUTDOWN;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeStateProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeStateProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeStateProto>() {
          public NodeStateProto findValueByNumber(int number) { return NodeStateProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(9);
    }

    private static final NodeStateProto[] VALUES = values();

    public static NodeStateProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private NodeStateProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.NodeStateProto)
  }

  /** Protobuf enum {@code hadoop.yarn.NodeUpdateTypeProto} */
  public enum NodeUpdateTypeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** NODE_USABLE = 0; */
    NODE_USABLE(0),
    /** NODE_UNUSABLE = 1; */
    NODE_UNUSABLE(1),
    /** NODE_DECOMMISSIONING = 2; */
    NODE_DECOMMISSIONING(2),
    ;

    /** NODE_USABLE = 0; */
    public static final int NODE_USABLE_VALUE = 0;
    /** NODE_UNUSABLE = 1; */
    public static final int NODE_UNUSABLE_VALUE = 1;
    /** NODE_DECOMMISSIONING = 2; */
    public static final int NODE_DECOMMISSIONING_VALUE = 2;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static NodeUpdateTypeProto valueOf(int value) { return forNumber(value); }

    public static NodeUpdateTypeProto forNumber(int value) {
      switch (value) {
        case 0: return NODE_USABLE;
        case 1: return NODE_UNUSABLE;
        case 2: return NODE_DECOMMISSIONING;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeUpdateTypeProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeUpdateTypeProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeUpdateTypeProto>() {
          public NodeUpdateTypeProto findValueByNumber(int number) { return NodeUpdateTypeProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(10);
    }

    private static final NodeUpdateTypeProto[] VALUES = values();

    public static NodeUpdateTypeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private NodeUpdateTypeProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.NodeUpdateTypeProto)
  }

  /** Protobuf enum {@code hadoop.yarn.NodeAttributeTypeProto} */
  public enum NodeAttributeTypeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** STRING = 1; */
    STRING(1),
    ;

    /** STRING = 1; */
    public static final int STRING_VALUE = 1;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static NodeAttributeTypeProto valueOf(int value) { return forNumber(value); }

    public static NodeAttributeTypeProto forNumber(int value) {
      switch (value) {
        case 1: return STRING;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeAttributeTypeProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeAttributeTypeProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeAttributeTypeProto>() {
          public NodeAttributeTypeProto findValueByNumber(int number) { return NodeAttributeTypeProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(11);
    }

    private static final NodeAttributeTypeProto[] VALUES = values();

    public static NodeAttributeTypeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private NodeAttributeTypeProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.NodeAttributeTypeProto)
  }

  /** Protobuf enum {@code hadoop.yarn.ContainerTypeProto} */
  public enum ContainerTypeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** APPLICATION_MASTER = 1; */
    APPLICATION_MASTER(1),
    /** TASK = 2; */
    TASK(2),
    ;

    /** APPLICATION_MASTER = 1; */
    public static final int APPLICATION_MASTER_VALUE = 1;
    /** TASK = 2; */
    public static final int TASK_VALUE = 2;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static ContainerTypeProto valueOf(int value) { return forNumber(value); }

    public static ContainerTypeProto forNumber(int value) {
      switch (value) {
        case 1: return APPLICATION_MASTER;
        case 2: return TASK;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerTypeProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerTypeProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerTypeProto>() {
          public ContainerTypeProto findValueByNumber(int number) { return ContainerTypeProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(12);
    }

    private static final ContainerTypeProto[] VALUES = values();

    public static ContainerTypeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ContainerTypeProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ContainerTypeProto)
  }

  /** Protobuf enum {@code hadoop.yarn.ExecutionTypeProto} */
  public enum ExecutionTypeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** GUARANTEED = 1; */
    GUARANTEED(1),
    /** OPPORTUNISTIC = 2; */
    OPPORTUNISTIC(2),
    ;

    /** GUARANTEED = 1; */
    public static final int GUARANTEED_VALUE = 1;
    /** OPPORTUNISTIC = 2; */
    public static final int OPPORTUNISTIC_VALUE = 2;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static ExecutionTypeProto valueOf(int value) { return forNumber(value); }

    public static ExecutionTypeProto forNumber(int value) {
      switch (value) {
        case 1: return GUARANTEED;
        case 2: return OPPORTUNISTIC;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ExecutionTypeProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ExecutionTypeProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ExecutionTypeProto>() {
          public ExecutionTypeProto findValueByNumber(int number) { return ExecutionTypeProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(13);
    }

    private static final ExecutionTypeProto[] VALUES = values();

    public static ExecutionTypeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ExecutionTypeProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ExecutionTypeProto)
  }

  /** Protobuf enum {@code hadoop.yarn.AMCommandProto} */
  public enum AMCommandProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** AM_RESYNC = 1; */
    AM_RESYNC(1),
    /** AM_SHUTDOWN = 2; */
    AM_SHUTDOWN(2),
    ;

    /** AM_RESYNC = 1; */
    public static final int AM_RESYNC_VALUE = 1;
    /** AM_SHUTDOWN = 2; */
    public static final int AM_SHUTDOWN_VALUE = 2;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static AMCommandProto valueOf(int value) { return forNumber(value); }

    public static AMCommandProto forNumber(int value) {
      switch (value) {
        case 1: return AM_RESYNC;
        case 2: return AM_SHUTDOWN;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<AMCommandProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<AMCommandProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<AMCommandProto>() {
          public AMCommandProto findValueByNumber(int number) { return AMCommandProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(14);
    }

    private static final AMCommandProto[] VALUES = values();

    public static AMCommandProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private AMCommandProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.AMCommandProto)
  }

  /** Protobuf enum {@code hadoop.yarn.RejectionReasonProto} */
  public enum RejectionReasonProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** RRP_COULD_NOT_PLACE_ON_NODE = 1; */
    RRP_COULD_NOT_PLACE_ON_NODE(1),
    /** RRP_COULD_NOT_SCHEDULE_ON_NODE = 2; */
    RRP_COULD_NOT_SCHEDULE_ON_NODE(2),
    ;

    /** RRP_COULD_NOT_PLACE_ON_NODE = 1; */
    public static final int RRP_COULD_NOT_PLACE_ON_NODE_VALUE = 1;
    /** RRP_COULD_NOT_SCHEDULE_ON_NODE = 2; */
    public static final int RRP_COULD_NOT_SCHEDULE_ON_NODE_VALUE = 2;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static RejectionReasonProto valueOf(int value) { return forNumber(value); }

    public static RejectionReasonProto forNumber(int value) {
      switch (value) {
        case 1: return RRP_COULD_NOT_PLACE_ON_NODE;
        case 2: return RRP_COULD_NOT_SCHEDULE_ON_NODE;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<RejectionReasonProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<RejectionReasonProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<RejectionReasonProto>() {
          public RejectionReasonProto findValueByNumber(int number) { return RejectionReasonProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(15);
    }

    private static final RejectionReasonProto[] VALUES = values();

    public static RejectionReasonProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private RejectionReasonProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.RejectionReasonProto)
  }

  /** Protobuf enum {@code hadoop.yarn.ApplicationTimeoutTypeProto} */
  public enum ApplicationTimeoutTypeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** APP_TIMEOUT_LIFETIME = 1; */
    APP_TIMEOUT_LIFETIME(1),
    ;

    /** APP_TIMEOUT_LIFETIME = 1; */
    public static final int APP_TIMEOUT_LIFETIME_VALUE = 1;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static ApplicationTimeoutTypeProto valueOf(int value) { return forNumber(value); }

    public static ApplicationTimeoutTypeProto forNumber(int value) {
      switch (value) {
        case 1: return APP_TIMEOUT_LIFETIME;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ApplicationTimeoutTypeProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ApplicationTimeoutTypeProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ApplicationTimeoutTypeProto>() {
          public ApplicationTimeoutTypeProto findValueByNumber(int number) { return ApplicationTimeoutTypeProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(16);
    }

    private static final ApplicationTimeoutTypeProto[] VALUES = values();

    public static ApplicationTimeoutTypeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ApplicationTimeoutTypeProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ApplicationTimeoutTypeProto)
  }

  /** Protobuf enum {@code hadoop.yarn.ApplicationAccessTypeProto} */
  public enum ApplicationAccessTypeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** APPACCESS_VIEW_APP = 1; */
    APPACCESS_VIEW_APP(1),
    /** APPACCESS_MODIFY_APP = 2; */
    APPACCESS_MODIFY_APP(2),
    ;

    /** APPACCESS_VIEW_APP = 1; */
    public static final int APPACCESS_VIEW_APP_VALUE = 1;
    /** APPACCESS_MODIFY_APP = 2; */
    public static final int APPACCESS_MODIFY_APP_VALUE = 2;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static ApplicationAccessTypeProto valueOf(int value) { return forNumber(value); }

    public static ApplicationAccessTypeProto forNumber(int value) {
      switch (value) {
        case 1: return APPACCESS_VIEW_APP;
        case 2: return APPACCESS_MODIFY_APP;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ApplicationAccessTypeProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ApplicationAccessTypeProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ApplicationAccessTypeProto>() {
          public ApplicationAccessTypeProto findValueByNumber(int number) { return ApplicationAccessTypeProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(17);
    }

    private static final ApplicationAccessTypeProto[] VALUES = values();

    public static ApplicationAccessTypeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ApplicationAccessTypeProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ApplicationAccessTypeProto)
  }

  /** Protobuf enum {@code hadoop.yarn.QueueStateProto} */
  public enum QueueStateProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** Q_STOPPED = 1; */
    Q_STOPPED(1),
    /** Q_RUNNING = 2; */
    Q_RUNNING(2),
    /** Q_DRAINING = 3; */
    Q_DRAINING(3),
    ;

    /** Q_STOPPED = 1; */
    public static final int Q_STOPPED_VALUE = 1;
    /** Q_RUNNING = 2; */
    public static final int Q_RUNNING_VALUE = 2;
    /** Q_DRAINING = 3; */
    public static final int Q_DRAINING_VALUE = 3;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static QueueStateProto valueOf(int value) { return forNumber(value); }

    public static QueueStateProto forNumber(int value) {
      switch (value) {
        case 1: return Q_STOPPED;
        case 2: return Q_RUNNING;
        case 3: return Q_DRAINING;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<QueueStateProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<QueueStateProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<QueueStateProto>() {
          public QueueStateProto findValueByNumber(int number) { return QueueStateProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(18);
    }

    private static final QueueStateProto[] VALUES = values();

    public static QueueStateProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private QueueStateProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.QueueStateProto)
  }

  /** Protobuf enum {@code hadoop.yarn.QueueACLProto} */
  public enum QueueACLProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** QACL_SUBMIT_APPLICATIONS = 1; */
    QACL_SUBMIT_APPLICATIONS(1),
    /** QACL_ADMINISTER_QUEUE = 2; */
    QACL_ADMINISTER_QUEUE(2),
    ;

    /** QACL_SUBMIT_APPLICATIONS = 1; */
    public static final int QACL_SUBMIT_APPLICATIONS_VALUE = 1;
    /** QACL_ADMINISTER_QUEUE = 2; */
    public static final int QACL_ADMINISTER_QUEUE_VALUE = 2;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static QueueACLProto valueOf(int value) { return forNumber(value); }

    public static QueueACLProto forNumber(int value) {
      switch (value) {
        case 1: return QACL_SUBMIT_APPLICATIONS;
        case 2: return QACL_ADMINISTER_QUEUE;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<QueueACLProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<QueueACLProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<QueueACLProto>() {
          public QueueACLProto findValueByNumber(int number) { return QueueACLProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(19);
    }

    private static final QueueACLProto[] VALUES = values();

    public static QueueACLProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private QueueACLProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.QueueACLProto)
  }

  /** Protobuf enum {@code hadoop.yarn.SignalContainerCommandProto} */
  public enum SignalContainerCommandProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** OUTPUT_THREAD_DUMP = 1; */
    OUTPUT_THREAD_DUMP(1),
    /** GRACEFUL_SHUTDOWN = 2; */
    GRACEFUL_SHUTDOWN(2),
    /** FORCEFUL_SHUTDOWN = 3; */
    FORCEFUL_SHUTDOWN(3),
    ;

    /** OUTPUT_THREAD_DUMP = 1; */
    public static final int OUTPUT_THREAD_DUMP_VALUE = 1;
    /** GRACEFUL_SHUTDOWN = 2; */
    public static final int GRACEFUL_SHUTDOWN_VALUE = 2;
    /** FORCEFUL_SHUTDOWN = 3; */
    public static final int FORCEFUL_SHUTDOWN_VALUE = 3;

    public final int getNumber() { return value; }

    /** @deprecated Use {@link #forNumber(int)} instead. */
    @java.lang.Deprecated
    public static SignalContainerCommandProto valueOf(int value) { return forNumber(value); }

    public static SignalContainerCommandProto forNumber(int value) {
      switch (value) {
        case 1: return OUTPUT_THREAD_DUMP;
        case 2: return GRACEFUL_SHUTDOWN;
        case 3: return FORCEFUL_SHUTDOWN;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<SignalContainerCommandProto>
        internalGetValueMap() { return internalValueMap; }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<SignalContainerCommandProto>
        internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<SignalContainerCommandProto>() {
          public SignalContainerCommandProto findValueByNumber(int number) { return SignalContainerCommandProto.forNumber(number); }
        };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() { return getDescriptor(); }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(20);
    }

    private static final SignalContainerCommandProto[] VALUES = values();

    public static SignalContainerCommandProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private SignalContainerCommandProto(int value) { this.value = value; }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.SignalContainerCommandProto)
  }

  /** Protobuf enum {@code hadoop.yarn.NodeAttributeOpCodeProto} */
  public enum NodeAttributeOpCodeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /** NO_OP = 1; */
    NO_OP(1),
    /** EQ = 2; */
    EQ(2),
    /** NE = 3; */
    NE(3),
    ;

    /** NO_OP = 1; */
    public static final int NO_OP_VALUE = 1;
    /** EQ = 2; */
    public static final int EQ_VALUE = 2;
    /** NE = 3; */
    public static final int NE_VALUE = 3;

    public final int getNumber() { return value; }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
*/ public static NodeAttributeOpCodeProto forNumber(int value) { switch (value) { case 1: return NO_OP; case 2: return EQ; case 3: return NE; default: return null; } } public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap< NodeAttributeOpCodeProto> internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap() { public NodeAttributeOpCodeProto findValueByNumber(int number) { return NodeAttributeOpCodeProto.forNumber(number); } }; public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(21); } private static final NodeAttributeOpCodeProto[] VALUES = values(); public static NodeAttributeOpCodeProto valueOf( org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private NodeAttributeOpCodeProto(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hadoop.yarn.NodeAttributeOpCodeProto) } /** * Protobuf enum {@code hadoop.yarn.ReservationRequestInterpreterProto} */ public enum ReservationRequestInterpreterProto implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum { /** * R_ANY = 0; */ R_ANY(0), /** * R_ALL = 1; */ R_ALL(1), /** * R_ORDER = 2; */ R_ORDER(2), /** * R_ORDER_NO_GAP = 3; */ R_ORDER_NO_GAP(3), ; /** * R_ANY = 0; */ public static final int R_ANY_VALUE = 0; /** * R_ALL = 1; */ public static final int R_ALL_VALUE = 1; /** * R_ORDER = 2; */ public static final int R_ORDER_VALUE = 2; /** * R_ORDER_NO_GAP = 3; */ public static final int R_ORDER_NO_GAP_VALUE = 3; public final int getNumber() { return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static ReservationRequestInterpreterProto valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static ReservationRequestInterpreterProto forNumber(int value) { switch (value) { case 0: return R_ANY; case 1: return R_ALL; case 2: return R_ORDER; case 3: return R_ORDER_NO_GAP; default: return null; } } public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap< ReservationRequestInterpreterProto> internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap() { public ReservationRequestInterpreterProto findValueByNumber(int number) { return ReservationRequestInterpreterProto.forNumber(number); } }; public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(22); } private static final ReservationRequestInterpreterProto[] VALUES = values(); public static ReservationRequestInterpreterProto valueOf( org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private ReservationRequestInterpreterProto(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hadoop.yarn.ReservationRequestInterpreterProto) } /** * Protobuf enum {@code hadoop.yarn.ContainerExitStatusProto} */ public enum ContainerExitStatusProto implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum { /** * SUCCESS = 0; */ SUCCESS(0), /** * INVALID = -1000; */ INVALID(-1000), /** * ABORTED = -100; */ ABORTED(-100), /** * DISKS_FAILED = -101; */ DISKS_FAILED(-101), ; /** * SUCCESS = 0; */ public static final int SUCCESS_VALUE = 0; /** * INVALID = -1000; */ public static final int INVALID_VALUE = -1000; /** * ABORTED = -100; */ public static final int ABORTED_VALUE = -100; /** * DISKS_FAILED = -101; */ public static final int DISKS_FAILED_VALUE = -101; public final int getNumber() { return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static ContainerExitStatusProto valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static ContainerExitStatusProto forNumber(int value) { switch (value) { case 0: return SUCCESS; case -1000: return INVALID; case -100: return ABORTED; case -101: return DISKS_FAILED; default: return null; } } public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap< ContainerExitStatusProto> internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap() { public ContainerExitStatusProto findValueByNumber(int number) { return ContainerExitStatusProto.forNumber(number); } }; public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(23); } private static final ContainerExitStatusProto[] VALUES = values(); public static ContainerExitStatusProto valueOf( org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private ContainerExitStatusProto(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hadoop.yarn.ContainerExitStatusProto) } /** * Protobuf enum {@code hadoop.yarn.ContainerRetryPolicyProto} */ public enum ContainerRetryPolicyProto implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum { /** * NEVER_RETRY = 0; */ NEVER_RETRY(0), /** * RETRY_ON_ALL_ERRORS = 1; */ RETRY_ON_ALL_ERRORS(1), /** * RETRY_ON_SPECIFIC_ERROR_CODES = 2; */ RETRY_ON_SPECIFIC_ERROR_CODES(2), ; /** * NEVER_RETRY = 0; */ public static final int NEVER_RETRY_VALUE = 0; /** * RETRY_ON_ALL_ERRORS = 1; */ public static final int RETRY_ON_ALL_ERRORS_VALUE = 1; /** * RETRY_ON_SPECIFIC_ERROR_CODES = 2; */ public static final int RETRY_ON_SPECIFIC_ERROR_CODES_VALUE = 2; public final int getNumber() { return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static ContainerRetryPolicyProto valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static ContainerRetryPolicyProto forNumber(int value) { switch (value) { case 0: return NEVER_RETRY; case 1: return RETRY_ON_ALL_ERRORS; case 2: return RETRY_ON_SPECIFIC_ERROR_CODES; default: return null; } } public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap< ContainerRetryPolicyProto> internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap() { public ContainerRetryPolicyProto findValueByNumber(int number) { return ContainerRetryPolicyProto.forNumber(number); } }; public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(24); } private static final ContainerRetryPolicyProto[] VALUES = values(); public static ContainerRetryPolicyProto valueOf( org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private ContainerRetryPolicyProto(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hadoop.yarn.ContainerRetryPolicyProto) } public interface SerializedExceptionProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.SerializedExceptionProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional string message = 1; * @return Whether the message field is set. */ boolean hasMessage(); /** * optional string message = 1; * @return The message. */ java.lang.String getMessage(); /** * optional string message = 1; * @return The bytes for message. */ org.apache.hadoop.thirdparty.protobuf.ByteString getMessageBytes(); /** * optional string trace = 2; * @return Whether the trace field is set. */ boolean hasTrace(); /** * optional string trace = 2; * @return The trace. */ java.lang.String getTrace(); /** * optional string trace = 2; * @return The bytes for trace. */ org.apache.hadoop.thirdparty.protobuf.ByteString getTraceBytes(); /** * optional string class_name = 3; * @return Whether the className field is set. */ boolean hasClassName(); /** * optional string class_name = 3; * @return The className. */ java.lang.String getClassName(); /** * optional string class_name = 3; * @return The bytes for className. */ org.apache.hadoop.thirdparty.protobuf.ByteString getClassNameBytes(); /** * optional .hadoop.yarn.SerializedExceptionProto cause = 4; * @return Whether the cause field is set. */ boolean hasCause(); /** * optional .hadoop.yarn.SerializedExceptionProto cause = 4; * @return The cause. 
*/ org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getCause(); /** * optional .hadoop.yarn.SerializedExceptionProto cause = 4; */ org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder getCauseOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.SerializedExceptionProto} */ public static final class SerializedExceptionProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.SerializedExceptionProto) SerializedExceptionProtoOrBuilder { private static final long serialVersionUID = 0L; // Use SerializedExceptionProto.newBuilder() to construct. private SerializedExceptionProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private SerializedExceptionProto() { message_ = ""; trace_ = ""; className_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new SerializedExceptionProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SerializedExceptionProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SerializedExceptionProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.class, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder.class); } private int bitField0_; public static final int MESSAGE_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object message_ = ""; /** * optional string message = 1; * @return Whether the message field is set. */ @java.lang.Override public boolean hasMessage() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string message = 1; * @return The message. */ @java.lang.Override public java.lang.String getMessage() { java.lang.Object ref = message_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { message_ = s; } return s; } } /** * optional string message = 1; * @return The bytes for message. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getMessageBytes() { java.lang.Object ref = message_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); message_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int TRACE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object trace_ = ""; /** * optional string trace = 2; * @return Whether the trace field is set. 
*/ @java.lang.Override public boolean hasTrace() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string trace = 2; * @return The trace. */ @java.lang.Override public java.lang.String getTrace() { java.lang.Object ref = trace_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { trace_ = s; } return s; } } /** * optional string trace = 2; * @return The bytes for trace. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getTraceBytes() { java.lang.Object ref = trace_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); trace_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int CLASS_NAME_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object className_ = ""; /** * optional string class_name = 3; * @return Whether the className field is set. */ @java.lang.Override public boolean hasClassName() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string class_name = 3; * @return The className. */ @java.lang.Override public java.lang.String getClassName() { java.lang.Object ref = className_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { className_ = s; } return s; } } /** * optional string class_name = 3; * @return The bytes for className. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getClassNameBytes() { java.lang.Object ref = className_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); className_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int CAUSE_FIELD_NUMBER = 4; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto cause_; /** * optional .hadoop.yarn.SerializedExceptionProto cause = 4; * @return Whether the cause field is set. */ @java.lang.Override public boolean hasCause() { return ((bitField0_ & 0x00000008) != 0); } /** * optional .hadoop.yarn.SerializedExceptionProto cause = 4; * @return The cause. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getCause() { return cause_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance() : cause_; } /** * optional .hadoop.yarn.SerializedExceptionProto cause = 4; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder getCauseOrBuilder() { return cause_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance() : cause_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, message_); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, trace_); } if (((bitField0_ & 0x00000004) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, className_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeMessage(4, getCause()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, message_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, trace_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, className_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(4, getCause()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto) obj; if (hasMessage() != other.hasMessage()) return false; if (hasMessage()) { if (!getMessage() .equals(other.getMessage())) return false; } if (hasTrace() != other.hasTrace()) return false; if (hasTrace()) { if (!getTrace() .equals(other.getTrace())) return false; } if (hasClassName() != other.hasClassName()) return false; if (hasClassName()) { if (!getClassName() .equals(other.getClassName())) return false; } if (hasCause() != other.hasCause()) return false; if (hasCause()) { if (!getCause() .equals(other.getCause())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasMessage()) { hash = (37 * hash) + MESSAGE_FIELD_NUMBER; hash = (53 * hash) + getMessage().hashCode(); } if (hasTrace()) { hash = (37 * hash) + TRACE_FIELD_NUMBER; hash = (53 * hash) + getTrace().hashCode(); } if (hasClassName()) { hash = (37 * hash) + CLASS_NAME_FIELD_NUMBER; hash = (53 * hash) + getClassName().hashCode(); } if (hasCause()) { hash = (37 * hash) + CAUSE_FIELD_NUMBER; hash = (53 * hash) + getCause().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.SerializedExceptionProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.SerializedExceptionProto) org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SerializedExceptionProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SerializedExceptionProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.class, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getCauseFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; message_ = ""; trace_ = ""; className_ = ""; cause_ = null; if (causeBuilder_ != null) { causeBuilder_.dispose(); causeBuilder_ = null; } return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SerializedExceptionProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto build() { org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto result = 
buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto result = new org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.message_ = message_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.trace_ = trace_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.className_ = className_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.cause_ = causeBuilder_ == null ? cause_ : causeBuilder_.build(); to_bitField0_ |= 0x00000008; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance()) return this; if (other.hasMessage()) { message_ = other.message_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasTrace()) { trace_ = other.trace_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasClassName()) { className_ = other.className_; bitField0_ |= 0x00000004; onChanged(); } if (other.hasCause()) { mergeCause(other.getCause()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean 
done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { message_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { trace_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { className_ = input.readBytes(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { input.readMessage( getCauseFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object message_ = ""; /** * optional string message = 1; * @return Whether the message field is set. */ public boolean hasMessage() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string message = 1; * @return The message. */ public java.lang.String getMessage() { java.lang.Object ref = message_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { message_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string message = 1; * @return The bytes for message. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getMessageBytes() { java.lang.Object ref = message_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); message_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string message = 1; * @param value The message to set. * @return This builder for chaining. */ public Builder setMessage( java.lang.String value) { if (value == null) { throw new NullPointerException(); } message_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional string message = 1; * @return This builder for chaining. */ public Builder clearMessage() { message_ = getDefaultInstance().getMessage(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * optional string message = 1; * @param value The bytes for message to set. * @return This builder for chaining. */ public Builder setMessageBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } message_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object trace_ = ""; /** * optional string trace = 2; * @return Whether the trace field is set. */ public boolean hasTrace() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string trace = 2; * @return The trace. */ public java.lang.String getTrace() { java.lang.Object ref = trace_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { trace_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string trace = 2; * @return The bytes for trace. 
*/ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getTraceBytes() { java.lang.Object ref = trace_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); trace_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string trace = 2; * @param value The trace to set. * @return This builder for chaining. */ public Builder setTrace( java.lang.String value) { if (value == null) { throw new NullPointerException(); } trace_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional string trace = 2; * @return This builder for chaining. */ public Builder clearTrace() { trace_ = getDefaultInstance().getTrace(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * optional string trace = 2; * @param value The bytes for trace to set. * @return This builder for chaining. */ public Builder setTraceBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } trace_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object className_ = ""; /** * optional string class_name = 3; * @return Whether the className field is set. */ public boolean hasClassName() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string class_name = 3; * @return The className. */ public java.lang.String getClassName() { java.lang.Object ref = className_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { className_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string class_name = 3; * @return The bytes for className. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getClassNameBytes() { java.lang.Object ref = className_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); className_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string class_name = 3; * @param value The className to set. * @return This builder for chaining. */ public Builder setClassName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } className_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional string class_name = 3; * @return This builder for chaining. */ public Builder clearClassName() { className_ = getDefaultInstance().getClassName(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * optional string class_name = 3; * @param value The bytes for className to set. * @return This builder for chaining. 
*/ public Builder setClassNameBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } className_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto cause_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder> causeBuilder_; /** * optional .hadoop.yarn.SerializedExceptionProto cause = 4; * @return Whether the cause field is set. */ public boolean hasCause() { return ((bitField0_ & 0x00000008) != 0); } /** * optional .hadoop.yarn.SerializedExceptionProto cause = 4; * @return The cause. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getCause() { if (causeBuilder_ == null) { return cause_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance() : cause_; } else { return causeBuilder_.getMessage(); } } /** * optional .hadoop.yarn.SerializedExceptionProto cause = 4; */ public Builder setCause(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto value) { if (causeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } cause_ = value; } else { causeBuilder_.setMessage(value); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.SerializedExceptionProto cause = 4; */ public Builder setCause( org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder builderForValue) { if (causeBuilder_ == null) { cause_ = builderForValue.build(); } else { causeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.SerializedExceptionProto cause = 4; */ public Builder mergeCause(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto value) { if (causeBuilder_ == null) { if (((bitField0_ & 0x00000008) != 0) && cause_ != null && cause_ != org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance()) { getCauseBuilder().mergeFrom(value); } else { cause_ = value; } } else { causeBuilder_.mergeFrom(value); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.SerializedExceptionProto cause = 4; */ public Builder clearCause() { bitField0_ = (bitField0_ & ~0x00000008); cause_ = null; if (causeBuilder_ != null) { causeBuilder_.dispose(); causeBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.SerializedExceptionProto cause = 4; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder getCauseBuilder() { bitField0_ |= 0x00000008; onChanged(); return getCauseFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.SerializedExceptionProto cause = 4; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder getCauseOrBuilder() { if (causeBuilder_ != null) { return causeBuilder_.getMessageOrBuilder(); } else { return cause_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance() : cause_; } } /** * optional .hadoop.yarn.SerializedExceptionProto cause = 4; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder> getCauseFieldBuilder() { if (causeBuilder_ == null) { causeBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder>( getCause(), getParentForChildren(), isClean()); cause_ = null; } return causeBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.SerializedExceptionProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.SerializedExceptionProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public SerializedExceptionProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ApplicationIdProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationIdProto) 
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional int32 id = 1; * @return Whether the id field is set. */ boolean hasId(); /** * optional int32 id = 1; * @return The id. */ int getId(); /** * optional int64 cluster_timestamp = 2; * @return Whether the clusterTimestamp field is set. */ boolean hasClusterTimestamp(); /** * optional int64 cluster_timestamp = 2; * @return The clusterTimestamp. */ long getClusterTimestamp(); } /** * Protobuf type {@code hadoop.yarn.ApplicationIdProto} */ public static final class ApplicationIdProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationIdProto) ApplicationIdProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ApplicationIdProto.newBuilder() to construct. private ApplicationIdProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ApplicationIdProto() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ApplicationIdProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationIdProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationIdProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder.class); } private int bitField0_; public static final int ID_FIELD_NUMBER = 1; private int id_ = 0; /** * optional int32 id = 1; * @return Whether the id field is set. */ @java.lang.Override public boolean hasId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional int32 id = 1; * @return The id. */ @java.lang.Override public int getId() { return id_; } public static final int CLUSTER_TIMESTAMP_FIELD_NUMBER = 2; private long clusterTimestamp_ = 0L; /** * optional int64 cluster_timestamp = 2; * @return Whether the clusterTimestamp field is set. */ @java.lang.Override public boolean hasClusterTimestamp() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int64 cluster_timestamp = 2; * @return The clusterTimestamp. 
     */
    @java.lang.Override
    public long getClusterTimestamp() {
      return clusterTimestamp_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt32(1, id_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(2, clusterTimestamp_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(1, id_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(2, clusterTimestamp_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto other =
          (org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto) obj;

      if (hasId() != other.hasId()) return false;
      if (hasId()) {
        if (getId() != other.getId()) return false;
      }
      if (hasClusterTimestamp() != other.hasClusterTimestamp()) return false;
      if (hasClusterTimestamp()) {
        if (getClusterTimestamp() != other.getClusterTimestamp()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasId()) {
        hash = (37 * hash) + ID_FIELD_NUMBER;
        hash = (53 * hash) + getId();
      }
      if (hasClusterTimestamp()) {
        hash = (37 * hash) + CLUSTER_TIMESTAMP_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getClusterTimestamp());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ApplicationIdProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationIdProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationIdProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationIdProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.class,
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        id_ = 0;
        clusterTimestamp_ = 0L;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationIdProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto result =
            new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.id_ = id_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.clusterTimestamp_ = clusterTimestamp_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) return this;
        if (other.hasId()) {
          setId(other.getId());
        }
        if (other.hasClusterTimestamp()) {
          setClusterTimestamp(other.getClusterTimestamp());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                id_ = input.readInt32();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              case 16: {
                clusterTimestamp_ = input.readInt64();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private int id_ ;
      /**
       * optional int32 id = 1;
       * @return Whether the id field is set.
       */
      @java.lang.Override
      public boolean hasId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional int32 id = 1;
       * @return The id.
       */
      @java.lang.Override
      public int getId() {
        return id_;
      }
      /**
       * optional int32 id = 1;
       * @param value The id to set.
       * @return This builder for chaining.
       */
      public Builder setId(int value) {
        id_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * optional int32 id = 1;
       * @return This builder for chaining.
       */
      public Builder clearId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        id_ = 0;
        onChanged();
        return this;
      }

      private long clusterTimestamp_ ;
      /**
       * optional int64 cluster_timestamp = 2;
       * @return Whether the clusterTimestamp field is set.
       */
      @java.lang.Override
      public boolean hasClusterTimestamp() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional int64 cluster_timestamp = 2;
       * @return The clusterTimestamp.
       */
      @java.lang.Override
      public long getClusterTimestamp() {
        return clusterTimestamp_;
      }
      /**
       * optional int64 cluster_timestamp = 2;
       * @param value The clusterTimestamp to set.
       * @return This builder for chaining.
       */
      public Builder setClusterTimestamp(long value) {
        clusterTimestamp_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * optional int64 cluster_timestamp = 2;
       * @return This builder for chaining.
       */
      public Builder clearClusterTimestamp() {
        bitField0_ = (bitField0_ & ~0x00000002);
        clusterTimestamp_ = 0L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationIdProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationIdProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationIdProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationIdProto>() {
      @java.lang.Override
      public ApplicationIdProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationIdProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationIdProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
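  // Usage sketch (editorial example, not part of the protoc-generated source):
  // a typical builder/parse round trip through the ApplicationIdProto API shown
  // above. The values and variable names below are illustrative assumptions.
  //
  //   YarnProtos.ApplicationIdProto appId =
  //       YarnProtos.ApplicationIdProto.newBuilder()
  //           .setClusterTimestamp(1700000000000L)   // RM startup time, ms
  //           .setId(42)                             // sequence number
  //           .build();
  //   byte[] wire = appId.toByteArray();
  //   YarnProtos.ApplicationIdProto parsed =
  //       YarnProtos.ApplicationIdProto.parseFrom(wire);
  //   // parsed.getId() == 42 and parsed.hasClusterTimestamp() == true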
  public interface ApplicationAttemptIdProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationAttemptIdProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     * @return Whether the applicationId field is set.
     */
    boolean hasApplicationId();
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     * @return The applicationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();

    /**
     * optional int32 attemptId = 2;
     * @return Whether the attemptId field is set.
     */
    boolean hasAttemptId();
    /**
     * optional int32 attemptId = 2;
     * @return The attemptId.
     */
    int getAttemptId();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ApplicationAttemptIdProto}
   */
  public static final class ApplicationAttemptIdProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationAttemptIdProto)
      ApplicationAttemptIdProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use ApplicationAttemptIdProto.newBuilder() to construct.
    private ApplicationAttemptIdProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ApplicationAttemptIdProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ApplicationAttemptIdProto();
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptIdProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptIdProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.class,
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     * @return Whether the applicationId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     * @return The applicationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
    /**
     * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }

    public static final int ATTEMPTID_FIELD_NUMBER = 2;
    private int attemptId_ = 0;
    /**
     * optional int32 attemptId = 2;
     * @return Whether the attemptId field is set.
     */
    @java.lang.Override
    public boolean hasAttemptId() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * optional int32 attemptId = 2;
     * @return The attemptId.
     */
    @java.lang.Override
    public int getAttemptId() {
      return attemptId_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt32(2, attemptId_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(2, attemptId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto other =
          (org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto) obj;

      if (hasApplicationId() != other.hasApplicationId()) return false;
      if (hasApplicationId()) {
        if (!getApplicationId()
            .equals(other.getApplicationId())) return false;
      }
      if (hasAttemptId() != other.hasAttemptId()) return false;
      if (hasAttemptId()) {
        if (getAttemptId() != other.getAttemptId()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationId()) {
        hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationId().hashCode();
      }
      if (hasAttemptId()) {
        hash = (37 * hash) + ATTEMPTID_FIELD_NUMBER;
        hash = (53 * hash) + getAttemptId();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ApplicationAttemptIdProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationAttemptIdProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptIdProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptIdProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.class,
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        attemptId_ = 0;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptIdProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto result =
            new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationId_ = applicationIdBuilder_ == null
              ? applicationId_ : applicationIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.attemptId_ = attemptId_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) return this;
        if (other.hasApplicationId()) {
          mergeApplicationId(other.getApplicationId());
        }
        if (other.hasAttemptId()) {
          setAttemptId(other.getAttemptId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getApplicationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                attemptId_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       * @return Whether the applicationId field is set.
       */
      public boolean hasApplicationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       * @return The applicationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
        if (applicationIdBuilder_ == null) {
          return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        } else {
          return applicationIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationId_ = value;
        } else {
          applicationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public Builder setApplicationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (applicationIdBuilder_ == null) {
          applicationId_ = builderForValue.build();
        } else {
          applicationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
              applicationId_ != null &&
              applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            getApplicationIdBuilder().mergeFrom(value);
          } else {
            applicationId_ = value;
          }
        } else {
          applicationIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public Builder clearApplicationId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
        if (applicationIdBuilder_ != null) {
          return applicationIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationId_ == null
              ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto application_id = 1;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
          getApplicationIdFieldBuilder() {
        if (applicationIdBuilder_ == null) {
          applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getApplicationId(),
                  getParentForChildren(),
                  isClean());
          applicationId_ = null;
        }
        return applicationIdBuilder_;
      }

      private int attemptId_ ;
      /**
       * optional int32 attemptId = 2;
       * @return Whether the attemptId field is set.
       */
      @java.lang.Override
      public boolean hasAttemptId() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional int32 attemptId = 2;
       * @return The attemptId.
       */
      @java.lang.Override
      public int getAttemptId() {
        return attemptId_;
      }
      /**
       * optional int32 attemptId = 2;
       * @param value The attemptId to set.
       * @return This builder for chaining.
       */
      public Builder setAttemptId(int value) {
        attemptId_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * optional int32 attemptId = 2;
       * @return This builder for chaining.
       */
      public Builder clearAttemptId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        attemptId_ = 0;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationAttemptIdProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationAttemptIdProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationAttemptIdProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationAttemptIdProto>() {
      @java.lang.Override
      public ApplicationAttemptIdProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationAttemptIdProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationAttemptIdProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
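  // Usage sketch (editorial example, not part of the protoc-generated source):
  // an attempt id embeds the ApplicationIdProto message defined above. Since
  // setApplicationId also accepts a Builder, the nested message can be built
  // inline; values below are illustrative assumptions.
  //
  //   YarnProtos.ApplicationAttemptIdProto attempt =
  //       YarnProtos.ApplicationAttemptIdProto.newBuilder()
  //           .setApplicationId(
  //               YarnProtos.ApplicationIdProto.newBuilder()
  //                   .setClusterTimestamp(1700000000000L)
  //                   .setId(42))
  //           .setAttemptId(1)
  //           .build();
  //   // attempt.getApplicationId().getId() == 42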
private ContainerIdProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ContainerIdProto() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ContainerIdProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerIdProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerIdProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder.class); } private int bitField0_; public static final int APP_ID_FIELD_NUMBER = 1; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto appId_; /** * optional .hadoop.yarn.ApplicationIdProto app_id = 1; * @return Whether the appId field is set. */ @java.lang.Override public boolean hasAppId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationIdProto app_id = 1; * @return The appId. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getAppId() { return appId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_; } /** * optional .hadoop.yarn.ApplicationIdProto app_id = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getAppIdOrBuilder() { return appId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_; } public static final int APP_ATTEMPT_ID_FIELD_NUMBER = 2; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto appAttemptId_; /** * optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2; * @return Whether the appAttemptId field is set. */ @java.lang.Override public boolean hasAppAttemptId() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2; * @return The appAttemptId. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getAppAttemptId() { return appAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getAppAttemptIdOrBuilder() { return appAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_; } public static final int ID_FIELD_NUMBER = 3; private long id_ = 0L; /** * optional int64 id = 3; * @return Whether the id field is set. 
*/ @java.lang.Override public boolean hasId() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int64 id = 3; * @return The id. */ @java.lang.Override public long getId() { return id_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getAppId()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getAppAttemptId()); } if (((bitField0_ & 0x00000004) != 0)) { output.writeInt64(3, id_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getAppId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getAppAttemptId()); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(3, id_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto) obj; if (hasAppId() != other.hasAppId()) return false; if (hasAppId()) { if (!getAppId() .equals(other.getAppId())) return false; } if (hasAppAttemptId() != other.hasAppAttemptId()) return false; if (hasAppAttemptId()) { if (!getAppAttemptId() .equals(other.getAppAttemptId())) return false; } if (hasId() != other.hasId()) return false; if (hasId()) { if (getId() != other.getId()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasAppId()) { hash = (37 * hash) + APP_ID_FIELD_NUMBER; hash = (53 * hash) + getAppId().hashCode(); } if (hasAppAttemptId()) { hash = (37 * hash) + APP_ATTEMPT_ID_FIELD_NUMBER; hash = (53 * hash) + getAppAttemptId().hashCode(); } if (hasId()) { hash = (37 * hash) + ID_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getId()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ContainerIdProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerIdProto) org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerIdProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerIdProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getAppIdFieldBuilder(); getAppAttemptIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; appId_ = null; if (appIdBuilder_ != null) { appIdBuilder_.dispose(); appIdBuilder_ = null; } appAttemptId_ = null; if (appAttemptIdBuilder_ != null) { appAttemptIdBuilder_.dispose(); appAttemptIdBuilder_ = null; } id_ = 0L; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerIdProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.appId_ 
= appIdBuilder_ == null ? appId_ : appIdBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.appAttemptId_ = appAttemptIdBuilder_ == null ? appAttemptId_ : appAttemptIdBuilder_.build(); to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.id_ = id_; to_bitField0_ |= 0x00000004; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) return this; if (other.hasAppId()) { mergeAppId(other.getAppId()); } if (other.hasAppAttemptId()) { mergeAppAttemptId(other.getAppAttemptId()); } if (other.hasId()) { setId(other.getId()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getAppIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getAppAttemptIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 24: { id_ = input.readInt64(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto appId_; private 
      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto appId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> appIdBuilder_;
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       * @return Whether the appId field is set.
       */
      public boolean hasAppId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       * @return The appId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getAppId() {
        if (appIdBuilder_ == null) {
          return appId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
        } else {
          return appIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       */
      public Builder setAppId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (appIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          appId_ = value;
        } else {
          appIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       */
      public Builder setAppId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (appIdBuilder_ == null) {
          appId_ = builderForValue.build();
        } else {
          appIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       */
      public Builder mergeAppId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (appIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
              appId_ != null &&
              appId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            getAppIdBuilder().mergeFrom(value);
          } else {
            appId_ = value;
          }
        } else {
          appIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       */
      public Builder clearAppId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        appId_ = null;
        if (appIdBuilder_ != null) {
          appIdBuilder_.dispose();
          appIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getAppIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getAppIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getAppIdOrBuilder() {
        if (appIdBuilder_ != null) {
          return appIdBuilder_.getMessageOrBuilder();
        } else {
          return appId_ == null
              ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationIdProto app_id = 1;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>
          getAppIdFieldBuilder() {
        if (appIdBuilder_ == null) {
          appIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getAppId(), getParentForChildren(), isClean());
          appId_ = null;
        }
        return appIdBuilder_;
      }
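      // Illustrative note (added commentary, not protoc output): singular
      // message fields expose three write paths; appId/partial below are
      // hypothetical variables:
      //   builder.setAppId(appId);              // replace the whole value
      //   builder.mergeAppId(partial);          // field-wise merge into it
      //   builder.getAppIdBuilder().setId(1);   // edit in place, marks app_id set
      // getAppIdBuilder() moves storage from appId_ into appIdBuilder_, which
      // is why clearAppId() must also dispose of the field builder.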
      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto appAttemptId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> appAttemptIdBuilder_;
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;
       * @return Whether the appAttemptId field is set.
       */
      public boolean hasAppAttemptId() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;
       * @return The appAttemptId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getAppAttemptId() {
        if (appAttemptIdBuilder_ == null) {
          return appAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
        } else {
          return appAttemptIdBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;
       */
      public Builder setAppAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (appAttemptIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          appAttemptId_ = value;
        } else {
          appAttemptIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;
       */
      public Builder setAppAttemptId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) {
        if (appAttemptIdBuilder_ == null) {
          appAttemptId_ = builderForValue.build();
        } else {
          appAttemptIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;
       */
      public Builder mergeAppAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (appAttemptIdBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
              appAttemptId_ != null &&
              appAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) {
            getAppAttemptIdBuilder().mergeFrom(value);
          } else {
            appAttemptId_ = value;
          }
        } else {
          appAttemptIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;
       */
      public Builder clearAppAttemptId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        appAttemptId_ = null;
        if (appAttemptIdBuilder_ != null) {
          appAttemptIdBuilder_.dispose();
          appAttemptIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getAppAttemptIdBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getAppAttemptIdFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getAppAttemptIdOrBuilder() {
        if (appAttemptIdBuilder_ != null) {
          return appAttemptIdBuilder_.getMessageOrBuilder();
        } else {
          return appAttemptId_ == null
              ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>
          getAppAttemptIdFieldBuilder() {
        if (appAttemptIdBuilder_ == null) {
          appAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>(
                  getAppAttemptId(), getParentForChildren(), isClean());
          appAttemptId_ = null;
        }
        return appAttemptIdBuilder_;
      }
      private long id_ ;
      /**
       * optional int64 id = 3;
       * @return Whether the id field is set.
       */
      @java.lang.Override
      public boolean hasId() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * optional int64 id = 3;
       * @return The id.
       */
      @java.lang.Override
      public long getId() {
        return id_;
      }
      /**
       * optional int64 id = 3;
       * @param value The id to set.
       * @return This builder for chaining.
       */
      public Builder setId(long value) {
        id_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * optional int64 id = 3;
       * @return This builder for chaining.
       */
      public Builder clearId() {
        bitField0_ = (bitField0_ & ~0x00000004);
        id_ = 0L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerIdProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerIdProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto();
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerIdProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerIdProto>() {
      @java.lang.Override
      public ContainerIdProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
    public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerIdProto> parser() {
      return PARSER;
    }
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerIdProto> getParserForType() {
      return PARSER;
    }
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
  }
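  // Illustrative usage sketch (added commentary, not generated code; all
  // variable names and values below are examples only):
  //
  //   YarnProtos.ApplicationIdProto appId =
  //       YarnProtos.ApplicationIdProto.newBuilder()
  //           .setClusterTimestamp(System.currentTimeMillis())
  //           .setId(1)
  //           .build();
  //   YarnProtos.ContainerIdProto containerId =
  //       YarnProtos.ContainerIdProto.newBuilder()
  //           .setAppId(appId)
  //           .setId(42L)
  //           .build();
  //   byte[] wire = containerId.toByteArray();
  //   YarnProtos.ContainerIdProto parsed =
  //       YarnProtos.ContainerIdProto.parseFrom(wire);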
  public interface ResourceInformationProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceInformationProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * required string key = 1;
     * @return Whether the key field is set.
     */
    boolean hasKey();
    /**
     * required string key = 1;
     * @return The key.
     */
    java.lang.String getKey();
    /**
     * required string key = 1;
     * @return The bytes for key.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString getKeyBytes();

    /**
     * optional int64 value = 2;
     * @return Whether the value field is set.
     */
    boolean hasValue();
    /**
     * optional int64 value = 2;
     * @return The value.
     */
    long getValue();

    /**
     * optional string units = 3;
     * @return Whether the units field is set.
     */
    boolean hasUnits();
    /**
     * optional string units = 3;
     * @return The units.
     */
    java.lang.String getUnits();
    /**
     * optional string units = 3;
     * @return The bytes for units.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString getUnitsBytes();

    /**
     * optional .hadoop.yarn.ResourceTypesProto type = 4;
     * @return Whether the type field is set.
     */
    boolean hasType();
    /**
     * optional .hadoop.yarn.ResourceTypesProto type = 4;
     * @return The type.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto getType();

    /**
     * repeated string tags = 5;
     * @return A list containing the tags.
     */
    java.util.List<java.lang.String> getTagsList();
    /**
     * repeated string tags = 5;
     * @return The count of tags.
     */
    int getTagsCount();
    /**
     * repeated string tags = 5;
     * @param index The index of the element to return.
     * @return The tags at the given index.
     */
    java.lang.String getTags(int index);
    /**
     * repeated string tags = 5;
     * @param index The index of the value to return.
     * @return The bytes of the tags at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString getTagsBytes(int index);

    /**
     * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getAttributesList();
    /**
     * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getAttributes(int index);
    /**
     * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
     */
    int getAttributesCount();
    /**
     * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> getAttributesOrBuilderList();
    /**
     * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getAttributesOrBuilder(int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.ResourceInformationProto}
   */
  public static final class ResourceInformationProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceInformationProto)
      ResourceInformationProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ResourceInformationProto.newBuilder() to construct.
    private ResourceInformationProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ResourceInformationProto() {
      key_ = "";
      units_ = "";
      type_ = 0;
      tags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
      attributes_ = java.util.Collections.emptyList();
    }
    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ResourceInformationProto();
    }
    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() {
      return this.unknownFields;
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceInformationProto_descriptor;
    }
    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceInformationProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder.class);
    }
    private int bitField0_;
    public static final int KEY_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object key_ = "";
    /**
     * required string key = 1;
     * @return Whether the key field is set.
     */
    @java.lang.Override
    public boolean hasKey() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * required string key = 1;
     * @return The key.
     */
    @java.lang.Override
    public java.lang.String getKey() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs =
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          key_ = s;
        }
        return s;
      }
    }
    /**
     * required string key = 1;
     * @return The bytes for key.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString getKeyBytes() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b =
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        key_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
    public static final int VALUE_FIELD_NUMBER = 2;
    private long value_ = 0L;
    /**
     * optional int64 value = 2;
     * @return Whether the value field is set.
     */
    @java.lang.Override
    public boolean hasValue() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * optional int64 value = 2;
     * @return The value.
     */
    @java.lang.Override
    public long getValue() {
      return value_;
    }
    public static final int UNITS_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object units_ = "";
    /**
     * optional string units = 3;
     * @return Whether the units field is set.
     */
    @java.lang.Override
    public boolean hasUnits() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * optional string units = 3;
     * @return The units.
     */
    @java.lang.Override
    public java.lang.String getUnits() {
      java.lang.Object ref = units_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs =
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          units_ = s;
        }
        return s;
      }
    }
    /**
     * optional string units = 3;
     * @return The bytes for units.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString getUnitsBytes() {
      java.lang.Object ref = units_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b =
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        units_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
    public static final int TYPE_FIELD_NUMBER = 4;
    private int type_ = 0;
    /**
     * optional .hadoop.yarn.ResourceTypesProto type = 4;
     * @return Whether the type field is set.
     */
    @java.lang.Override
    public boolean hasType() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * optional .hadoop.yarn.ResourceTypesProto type = 4;
     * @return The type.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto getType() {
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto result = org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.forNumber(type_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.COUNTABLE : result;
    }
    public static final int TAGS_FIELD_NUMBER = 5;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringList tags_;
    /**
     * repeated string tags = 5;
     * @return A list containing the tags.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getTagsList() {
      return tags_;
    }
    /**
     * repeated string tags = 5;
     * @return The count of tags.
     */
    public int getTagsCount() {
      return tags_.size();
    }
    /**
     * repeated string tags = 5;
     * @param index The index of the element to return.
     * @return The tags at the given index.
     */
    public java.lang.String getTags(int index) {
      return tags_.get(index);
    }
    /**
     * repeated string tags = 5;
     * @param index The index of the value to return.
     * @return The bytes of the tags at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString getTagsBytes(int index) {
      return tags_.getByteString(index);
    }
    public static final int ATTRIBUTES_FIELD_NUMBER = 6;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> attributes_;
    /**
     * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getAttributesList() {
      return attributes_;
    }
    /**
     * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
        getAttributesOrBuilderList() {
      return attributes_;
    }
    /**
     * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
     */
    @java.lang.Override
    public int getAttributesCount() {
      return attributes_.size();
    }
    /**
     * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getAttributes(int index) {
      return attributes_.get(index);
    }
    /**
     * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getAttributesOrBuilder(
        int index) {
      return attributes_.get(index);
    }
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      if (!hasKey()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, key_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(2, value_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, units_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeEnum(4, type_);
      }
      for (int i = 0; i < tags_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, tags_.getRaw(i));
      }
      for (int i = 0; i < attributes_.size(); i++) {
        output.writeMessage(6, attributes_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;
      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, key_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
            .computeInt64Size(2, value_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, units_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
            .computeEnumSize(4, type_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < tags_.size(); i++) {
          dataSize += computeStringSizeNoTag(tags_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getTagsList().size();
      }
      for (int i = 0; i < attributes_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
            .computeMessageSize(6, attributes_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
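    // Illustrative note (added commentary, not protoc output) on the size
    // arithmetic above, using example values: for key = "memory-mb" (field 1)
    // and value = 4096 (field 2), computeStringSize(1, key_) is
    // 1 tag byte + 1 length byte + 9 UTF-8 bytes = 11, and
    // computeInt64Size(2, value_) is 1 tag byte + 2 varint bytes = 3,
    // so getSerializedSize() would return 14 for just those two fields.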
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto other =
          (org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto) obj;
      if (hasKey() != other.hasKey()) return false;
      if (hasKey()) {
        if (!getKey().equals(other.getKey())) return false;
      }
      if (hasValue() != other.hasValue()) return false;
      if (hasValue()) {
        if (getValue() != other.getValue()) return false;
      }
      if (hasUnits() != other.hasUnits()) return false;
      if (hasUnits()) {
        if (!getUnits().equals(other.getUnits())) return false;
      }
      if (hasType() != other.hasType()) return false;
      if (hasType()) {
        if (type_ != other.type_) return false;
      }
      if (!getTagsList().equals(other.getTagsList())) return false;
      if (!getAttributesList().equals(other.getAttributesList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasKey()) {
        hash = (37 * hash) + KEY_FIELD_NUMBER;
        hash = (53 * hash) + getKey().hashCode();
      }
      if (hasValue()) {
        hash = (37 * hash) + VALUE_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getValue());
      }
      if (hasUnits()) {
        hash = (37 * hash) + UNITS_FIELD_NUMBER;
        hash = (53 * hash) + getUnits().hashCode();
      }
      if (hasType()) {
        hash = (37 * hash) + TYPE_FIELD_NUMBER;
        hash = (53 * hash) + type_;
      }
      if (getTagsCount() > 0) {
        hash = (37 * hash) + TAGS_FIELD_NUMBER;
        hash = (53 * hash) + getTagsList().hashCode();
      }
      if (getAttributesCount() > 0) {
        hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER;
        hash = (53 * hash) + getAttributesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    @java.lang.Override
    public Builder newBuilderForType() {
      return newBuilder();
    }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
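    // Illustrative note (added commentary, not protoc output): parseFrom(
    // InputStream) consumes the stream to end-of-input as one message, while
    // parseDelimitedFrom first reads a varint length prefix (as written by
    // writeDelimitedTo), so several messages can share one stream. Sketch,
    // with hypothetical in/out streams:
    //   msg1.writeDelimitedTo(out);
    //   msg2.writeDelimitedTo(out);
    //   ResourceInformationProto a = ResourceInformationProto.parseDelimitedFrom(in);
    //   ResourceInformationProto b = ResourceInformationProto.parseDelimitedFrom(in);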
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ResourceInformationProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceInformationProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceInformationProto_descriptor;
      }
      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceInformationProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder.class);
      }
      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.newBuilder()
      private Builder() {
      }
      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        key_ = "";
        value_ = 0L;
        units_ = "";
        type_ = 0;
        tags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000010);
        if (attributesBuilder_ == null) {
          attributes_ = java.util.Collections.emptyList();
        } else {
          attributes_ = null;
          attributesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        return this;
      }
      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceInformationProto_descriptor;
      }
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.getDefaultInstance();
      }
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }
      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto result) {
        if (((bitField0_ & 0x00000010) != 0)) {
          tags_ = tags_.getUnmodifiableView();
          bitField0_ = (bitField0_ & ~0x00000010);
        }
        result.tags_ = tags_;
        if (attributesBuilder_ == null) {
          if (((bitField0_ & 0x00000020) != 0)) {
            attributes_ = java.util.Collections.unmodifiableList(attributes_);
            bitField0_ = (bitField0_ & ~0x00000020);
          }
          result.attributes_ = attributes_;
        } else {
          result.attributes_ = attributesBuilder_.build();
        }
      }
      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.key_ = key_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.value_ = value_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.units_ = units_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.type_ = type_;
          to_bitField0_ |= 0x00000008;
        }
        result.bitField0_ |= to_bitField0_;
      }
      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto) other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.getDefaultInstance()) return this;
        if (other.hasKey()) {
          key_ = other.key_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasValue()) {
          setValue(other.getValue());
        }
        if (other.hasUnits()) {
          units_ = other.units_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        if (other.hasType()) {
          setType(other.getType());
        }
        if (!other.tags_.isEmpty()) {
          if (tags_.isEmpty()) {
            tags_ = other.tags_;
            bitField0_ = (bitField0_ & ~0x00000010);
          } else {
            ensureTagsIsMutable();
            tags_.addAll(other.tags_);
          }
          onChanged();
        }
        if (attributesBuilder_ == null) {
          if (!other.attributes_.isEmpty()) {
            if (attributes_.isEmpty()) {
              attributes_ = other.attributes_;
              bitField0_ = (bitField0_ & ~0x00000020);
            } else {
              ensureAttributesIsMutable();
              attributes_.addAll(other.attributes_);
            }
            onChanged();
          }
        } else {
          if (!other.attributes_.isEmpty()) {
            if (attributesBuilder_.isEmpty()) {
              attributesBuilder_.dispose();
              attributesBuilder_ = null;
              attributes_ = other.attributes_;
              bitField0_ = (bitField0_ & ~0x00000020);
              attributesBuilder_ =
                  org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                      ? getAttributesFieldBuilder() : null;
            } else {
              attributesBuilder_.addAllMessages(other.attributes_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasKey()) {
          return false;
        }
        return true;
      }
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                key_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                value_ = input.readInt64();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              case 26: {
                units_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 32: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(4, tmpRaw);
                } else {
                  type_ = tmpRaw;
                  bitField0_ |= 0x00000008;
                }
                break;
              } // case 32
              case 42: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureTagsIsMutable();
                tags_.add(bs);
                break;
              } // case 42
              case 50: {
                org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.PARSER,
                        extensionRegistry);
                if (attributesBuilder_ == null) {
                  ensureAttributesIsMutable();
                  attributes_.add(m);
                } else {
                  attributesBuilder_.addMessage(m);
                }
                break;
              } // case 50
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
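      // Illustrative note (added commentary, not protoc output) on the
      // case-32 branch above: because type is a proto2 enum, a wire value
      // with no matching constant is not stored in type_; it is routed to
      // mergeUnknownVarintField(4, tmpRaw) and kept in the unknown-field set,
      // so re-serialization does not drop it. A recognized value (currently
      // only COUNTABLE = 0) is stored and makes hasType() return true.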
      private int bitField0_;
      private java.lang.Object key_ = "";
      /**
       * required string key = 1;
       * @return Whether the key field is set.
       */
      public boolean hasKey() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * required string key = 1;
       * @return The key.
       */
      public java.lang.String getKey() {
        java.lang.Object ref = key_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            key_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * required string key = 1;
       * @return The bytes for key.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString getKeyBytes() {
        java.lang.Object ref = key_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b =
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
          key_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * required string key = 1;
       * @param value The key to set.
       * @return This builder for chaining.
       */
      public Builder setKey(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        key_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * required string key = 1;
       * @return This builder for chaining.
       */
      public Builder clearKey() {
        key_ = getDefaultInstance().getKey();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * required string key = 1;
       * @param value The bytes for key to set.
       * @return This builder for chaining.
       */
      public Builder setKeyBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        key_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      private long value_ ;
      /**
       * optional int64 value = 2;
       * @return Whether the value field is set.
       */
      @java.lang.Override
      public boolean hasValue() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional int64 value = 2;
       * @return The value.
       */
      @java.lang.Override
      public long getValue() {
        return value_;
      }
      /**
       * optional int64 value = 2;
       * @param value The value to set.
       * @return This builder for chaining.
       */
      public Builder setValue(long value) {
        value_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * optional int64 value = 2;
       * @return This builder for chaining.
       */
      public Builder clearValue() {
        bitField0_ = (bitField0_ & ~0x00000002);
        value_ = 0L;
        onChanged();
        return this;
      }
      private java.lang.Object units_ = "";
      /**
       * optional string units = 3;
       * @return Whether the units field is set.
       */
      public boolean hasUnits() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * optional string units = 3;
       * @return The units.
       */
      public java.lang.String getUnits() {
        java.lang.Object ref = units_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            units_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string units = 3;
       * @return The bytes for units.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString getUnitsBytes() {
        java.lang.Object ref = units_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b =
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
          units_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string units = 3;
       * @param value The units to set.
       * @return This builder for chaining.
       */
      public Builder setUnits(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        units_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * optional string units = 3;
       * @return This builder for chaining.
       */
      public Builder clearUnits() {
        units_ = getDefaultInstance().getUnits();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * optional string units = 3;
       * @param value The bytes for units to set.
       * @return This builder for chaining.
       */
      public Builder setUnitsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        units_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      private int type_ = 0;
      /**
       * optional .hadoop.yarn.ResourceTypesProto type = 4;
       * @return Whether the type field is set.
       */
      @java.lang.Override
      public boolean hasType() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * optional .hadoop.yarn.ResourceTypesProto type = 4;
       * @return The type.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto getType() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto result = org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.forNumber(type_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.COUNTABLE : result;
      }
      /**
       * optional .hadoop.yarn.ResourceTypesProto type = 4;
       * @param value The type to set.
       * @return This builder for chaining.
       */
      public Builder setType(org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        type_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceTypesProto type = 4;
       * @return This builder for chaining.
       */
      public Builder clearType() {
        bitField0_ = (bitField0_ & ~0x00000008);
        type_ = 0;
        onChanged();
        return this;
      }
      private org.apache.hadoop.thirdparty.protobuf.LazyStringList tags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
      private void ensureTagsIsMutable() {
        if (!((bitField0_ & 0x00000010) != 0)) {
          tags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(tags_);
          bitField0_ |= 0x00000010;
        }
      }
      /**
       * repeated string tags = 5;
       * @return A list containing the tags.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getTagsList() {
        return tags_.getUnmodifiableView();
      }
      /**
       * repeated string tags = 5;
       * @return The count of tags.
       */
      public int getTagsCount() {
        return tags_.size();
      }
      /**
       * repeated string tags = 5;
       * @param index The index of the element to return.
       * @return The tags at the given index.
       */
      public java.lang.String getTags(int index) {
        return tags_.get(index);
      }
      /**
       * repeated string tags = 5;
       * @param index The index of the value to return.
       * @return The bytes of the tags at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString getTagsBytes(int index) {
        return tags_.getByteString(index);
      }
      /**
       * repeated string tags = 5;
       * @param index The index to set the value at.
       * @param value The tags to set.
       * @return This builder for chaining.
       */
      public Builder setTags(
          int index, java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTagsIsMutable();
        tags_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * repeated string tags = 5;
       * @param value The tags to add.
       * @return This builder for chaining.
       */
      public Builder addTags(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTagsIsMutable();
        tags_.add(value);
        onChanged();
        return this;
      }
      /**
       * repeated string tags = 5;
       * @param values The tags to add.
       * @return This builder for chaining.
       */
      public Builder addAllTags(
          java.lang.Iterable<java.lang.String> values) {
        ensureTagsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, tags_);
        onChanged();
        return this;
      }
      /**
       * repeated string tags = 5;
       * @return This builder for chaining.
       */
      public Builder clearTags() {
        tags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
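      // Illustrative usage sketch for the repeated tags field (added
      // commentary, not generated code; tag values are examples only):
      //   builder.addTags("gpu")
      //          .addAllTags(java.util.Arrays.asList("fpga", "licensed"));
      //   builder.getTagsCount();   // -> 3
      //   builder.clearTags();      // back to the shared empty list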
      /**
       * repeated string tags = 5;
       * @param value The bytes of the tags to add.
       * @return This builder for chaining.
       */
      public Builder addTagsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTagsIsMutable();
        tags_.add(value);
        onChanged();
        return this;
      }
      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> attributes_ =
          java.util.Collections.emptyList();
      private void ensureAttributesIsMutable() {
        if (!((bitField0_ & 0x00000020) != 0)) {
          attributes_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>(attributes_);
          bitField0_ |= 0x00000020;
        }
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> attributesBuilder_;
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getAttributesList() {
        if (attributesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(attributes_);
        } else {
          return attributesBuilder_.getMessageList();
        }
      }
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public int getAttributesCount() {
        if (attributesBuilder_ == null) {
          return attributes_.size();
        } else {
          return attributesBuilder_.getCount();
        }
      }
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getAttributes(int index) {
        if (attributesBuilder_ == null) {
          return attributes_.get(index);
        } else {
          return attributesBuilder_.getMessage(index);
        }
      }
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public Builder setAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (attributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAttributesIsMutable();
          attributes_.set(index, value);
          onChanged();
        } else {
          attributesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public Builder setAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (attributesBuilder_ == null) {
          ensureAttributesIsMutable();
          attributes_.set(index, builderForValue.build());
          onChanged();
        } else {
          attributesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public Builder addAttributes(org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (attributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAttributesIsMutable();
          attributes_.add(value);
          onChanged();
        } else {
          attributesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public Builder addAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (attributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAttributesIsMutable();
          attributes_.add(index, value);
          onChanged();
        } else {
          attributesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public Builder addAttributes(
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (attributesBuilder_ == null) {
          ensureAttributesIsMutable();
          attributes_.add(builderForValue.build());
          onChanged();
        } else {
          attributesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public Builder addAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (attributesBuilder_ == null) {
          ensureAttributesIsMutable();
          attributes_.add(index, builderForValue.build());
          onChanged();
        } else {
          attributesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public Builder addAllAttributes(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> values) {
        if (attributesBuilder_ == null) {
          ensureAttributesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, attributes_);
          onChanged();
        } else {
          attributesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public Builder clearAttributes() {
        if (attributesBuilder_ == null) {
          attributes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000020);
          onChanged();
        } else {
          attributesBuilder_.clear();
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public Builder removeAttributes(int index) {
        if (attributesBuilder_ == null) {
          ensureAttributesIsMutable();
          attributes_.remove(index);
          onChanged();
        } else {
          attributesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder getAttributesBuilder(
          int index) {
        return getAttributesFieldBuilder().getBuilder(index);
      }
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getAttributesOrBuilder(
          int index) {
        if (attributesBuilder_ == null) {
          return attributes_.get(index);
        } else {
          return attributesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
          getAttributesOrBuilderList() {
        if (attributesBuilder_ != null) {
          return attributesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(attributes_);
        }
      }
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addAttributesBuilder() {
        return getAttributesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addAttributesBuilder(
          int index) {
        return getAttributesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.yarn.StringStringMapProto attributes = 6;
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder>
          getAttributesBuilderList() {
        return getAttributesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
          getAttributesFieldBuilder() {
        if (attributesBuilder_ == null) {
          attributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>(
                  attributes_,
                  ((bitField0_ & 0x00000020) != 0),
                  getParentForChildren(),
                  isClean());
          attributes_ = null;
        }
        return attributesBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceInformationProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceInformationProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto();
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceInformationProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceInformationProto>() {
      @java.lang.Override
      public ResourceInformationProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceInformationProto> parser() {
      return PARSER;
    }
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceInformationProto> getParserForType() {
      return PARSER;
    }
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
  }
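  // Illustrative usage sketch (added commentary, not generated code; the key,
  // value, and units below are examples only). key is the sole required
  // field, so build() throws UninitializedMessageException when it is
  // missing, while buildPartial() skips that check:
  //
  //   YarnProtos.ResourceInformationProto ri =
  //       YarnProtos.ResourceInformationProto.newBuilder()
  //           .setKey("memory-mb")
  //           .setValue(4096L)
  //           .setUnits("Mi")
  //           .setType(YarnProtos.ResourceTypesProto.COUNTABLE)
  //           .build();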
*/ boolean hasUnits(); /** * optional string units = 2; * @return The units. */ java.lang.String getUnits(); /** * optional string units = 2; * @return The bytes for units. */ org.apache.hadoop.thirdparty.protobuf.ByteString getUnitsBytes(); /** * optional .hadoop.yarn.ResourceTypesProto type = 3; * @return Whether the type field is set. */ boolean hasType(); /** * optional .hadoop.yarn.ResourceTypesProto type = 3; * @return The type. */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto getType(); } /** * Protobuf type {@code hadoop.yarn.ResourceTypeInfoProto} */ public static final class ResourceTypeInfoProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceTypeInfoProto) ResourceTypeInfoProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ResourceTypeInfoProto.newBuilder() to construct. private ResourceTypeInfoProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ResourceTypeInfoProto() { name_ = ""; units_ = ""; type_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ResourceTypeInfoProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceTypeInfoProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceTypeInfoProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * required string name = 1; * @return Whether the name field is set. */ @java.lang.Override public boolean hasName() { return ((bitField0_ & 0x00000001) != 0); } /** * required string name = 1; * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * required string name = 1; * @return The bytes for name. 
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int UNITS_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object units_ = ""; /** * optional string units = 2; * @return Whether the units field is set. */ @java.lang.Override public boolean hasUnits() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string units = 2; * @return The units. */ @java.lang.Override public java.lang.String getUnits() { java.lang.Object ref = units_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { units_ = s; } return s; } } /** * optional string units = 2; * @return The bytes for units. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getUnitsBytes() { java.lang.Object ref = units_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); units_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int TYPE_FIELD_NUMBER = 3; private int type_ = 0; /** * optional .hadoop.yarn.ResourceTypesProto type = 3; * @return Whether the type field is set. */ @java.lang.Override public boolean hasType() { return ((bitField0_ & 0x00000004) != 0); } /** * optional .hadoop.yarn.ResourceTypesProto type = 3; * @return The type. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto getType() { org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto result = org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.forNumber(type_); return result == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.COUNTABLE : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, units_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeEnum(3, type_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, units_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(3, type_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto) obj; if (hasName() != other.hasName()) return false; if (hasName()) { if (!getName() .equals(other.getName())) return false; } if (hasUnits() != other.hasUnits()) return false; if (hasUnits()) { if (!getUnits() .equals(other.getUnits())) return false; } if (hasType() != other.hasType()) return false; if (hasType()) { if (type_ != other.type_) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (hasUnits()) { hash = (37 * hash) + UNITS_FIELD_NUMBER; hash = (53 * hash) + getUnits().hashCode(); } if (hasType()) { hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + type_; } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ResourceTypeInfoProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceTypeInfoProto) org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceTypeInfoProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceTypeInfoProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; units_ = ""; type_ = 0; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceTypeInfoProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.units_ = units_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.type_ = type_; to_bitField0_ |= 0x00000004; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.getDefaultInstance()) return this; if (other.hasName()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasUnits()) { units_ = other.units_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasType()) { setType(other.getType()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasName()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { units_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(3, tmpRaw); } else { type_ = tmpRaw; bitField0_ |= 0x00000004; } break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * required string name = 1; * @return Whether the name field is set. */ public boolean hasName() { return ((bitField0_ & 0x00000001) != 0); } /** * required string name = 1; * @return The name. 
*/ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } else { return (java.lang.String) ref; } } /** * required string name = 1; * @return The bytes for name. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * required string name = 1; * @param value The name to set. * @return This builder for chaining. */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * required string name = 1; * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * required string name = 1; * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object units_ = ""; /** * optional string units = 2; * @return Whether the units field is set. */ public boolean hasUnits() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string units = 2; * @return The units. */ public java.lang.String getUnits() { java.lang.Object ref = units_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { units_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string units = 2; * @return The bytes for units. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getUnitsBytes() { java.lang.Object ref = units_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); units_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string units = 2; * @param value The units to set. * @return This builder for chaining. */ public Builder setUnits( java.lang.String value) { if (value == null) { throw new NullPointerException(); } units_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional string units = 2; * @return This builder for chaining. */ public Builder clearUnits() { units_ = getDefaultInstance().getUnits(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * optional string units = 2; * @param value The bytes for units to set. * @return This builder for chaining. 
     */
      public Builder setUnitsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        units_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private int type_ = 0;
      /**
       * optional .hadoop.yarn.ResourceTypesProto type = 3;
       * @return Whether the type field is set.
       */
      @java.lang.Override public boolean hasType() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * optional .hadoop.yarn.ResourceTypesProto type = 3;
       * @return The type.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto getType() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto result = org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.forNumber(type_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.COUNTABLE : result;
      }
      /**
       * optional .hadoop.yarn.ResourceTypesProto type = 3;
       * @param value The type to set.
       * @return This builder for chaining.
       */
      public Builder setType(org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        type_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ResourceTypesProto type = 3;
       * @return This builder for chaining.
       */
      public Builder clearType() {
        bitField0_ = (bitField0_ & ~0x00000004);
        type_ = 0;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceTypeInfoProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceTypeInfoProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceTypeInfoProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceTypeInfoProto>() {
      @java.lang.Override
      public ResourceTypeInfoProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceTypeInfoProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceTypeInfoProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
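  /*
   * Illustrative sketch (not part of the generated source): building and
   * serializing a ResourceTypeInfoProto uses only the setters shown above.
   * Since "name" is a required field, build() throws an
   * UninitializedMessageException if it is unset (see the
   * newUninitializedMessageException call in build()). The resource name
   * below is only an example value:
   *
   *   YarnProtos.ResourceTypeInfoProto gpu =
   *       YarnProtos.ResourceTypeInfoProto.newBuilder()
   *           .setName("yarn.io/gpu")
   *           .setType(YarnProtos.ResourceTypesProto.COUNTABLE)
   *           .build();
   *   byte[] wire = gpu.toByteArray();
   *   YarnProtos.ResourceTypeInfoProto back =
   *       YarnProtos.ResourceTypeInfoProto.parseFrom(wire);
   *
   * Note that getType() falls back to COUNTABLE when the stored enum number
   * is unrecognized, so reading the field never returns null.
   */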
  public interface ResourceProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional int64 memory = 1;
     * @return Whether the memory field is set.
     */
    boolean hasMemory();
    /**
     * optional int64 memory = 1;
     * @return The memory.
     */
    long getMemory();

    /**
     * optional int32 virtual_cores = 2;
     * @return Whether the virtualCores field is set.
     */
    boolean hasVirtualCores();
    /**
     * optional int32 virtual_cores = 2;
     * @return The virtualCores.
     */
    int getVirtualCores();

    /**
     * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto>
        getResourceValueMapList();
    /**
     * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto getResourceValueMap(int index);
    /**
     * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;
     */
    int getResourceValueMapCount();
    /**
     * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder>
        getResourceValueMapOrBuilderList();
    /**
     * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder getResourceValueMapOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.ResourceProto}
   */
  public static final class ResourceProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceProto)
      ResourceProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ResourceProto.newBuilder() to construct.
    private ResourceProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ResourceProto() {
      resourceValueMap_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ResourceProto();
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder.class);
    }

    private int bitField0_;
    public static final int MEMORY_FIELD_NUMBER = 1;
    private long memory_ = 0L;
    /**
     * optional int64 memory = 1;
     * @return Whether the memory field is set.
*/ @java.lang.Override public boolean hasMemory() { return ((bitField0_ & 0x00000001) != 0); } /** * optional int64 memory = 1; * @return The memory. */ @java.lang.Override public long getMemory() { return memory_; } public static final int VIRTUAL_CORES_FIELD_NUMBER = 2; private int virtualCores_ = 0; /** * optional int32 virtual_cores = 2; * @return Whether the virtualCores field is set. */ @java.lang.Override public boolean hasVirtualCores() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int32 virtual_cores = 2; * @return The virtualCores. */ @java.lang.Override public int getVirtualCores() { return virtualCores_; } public static final int RESOURCE_VALUE_MAP_FIELD_NUMBER = 3; @SuppressWarnings("serial") private java.util.List resourceValueMap_; /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ @java.lang.Override public java.util.List getResourceValueMapList() { return resourceValueMap_; } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ @java.lang.Override public java.util.List getResourceValueMapOrBuilderList() { return resourceValueMap_; } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ @java.lang.Override public int getResourceValueMapCount() { return resourceValueMap_.size(); } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto getResourceValueMap(int index) { return resourceValueMap_.get(index); } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder getResourceValueMapOrBuilder( int index) { return resourceValueMap_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getResourceValueMapCount(); i++) { if (!getResourceValueMap(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeInt64(1, memory_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt32(2, virtualCores_); } for (int i = 0; i < resourceValueMap_.size(); i++) { output.writeMessage(3, resourceValueMap_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(1, memory_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(2, virtualCores_); } for (int i = 0; i < resourceValueMap_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(3, resourceValueMap_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto) obj; if (hasMemory() != other.hasMemory()) return false; if (hasMemory()) { if (getMemory() != other.getMemory()) return false; } if (hasVirtualCores() != other.hasVirtualCores()) return false; if (hasVirtualCores()) { if (getVirtualCores() != other.getVirtualCores()) return false; } if (!getResourceValueMapList() .equals(other.getResourceValueMapList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasMemory()) { hash = (37 * hash) + MEMORY_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getMemory()); } if (hasVirtualCores()) { hash = (37 * hash) + VIRTUAL_CORES_FIELD_NUMBER; hash = (53 * hash) + getVirtualCores(); } if (getResourceValueMapCount() > 0) { hash = (37 * hash) + RESOURCE_VALUE_MAP_FIELD_NUMBER; hash = (53 * hash) + getResourceValueMapList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom( java.io.InputStream input, 
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ResourceProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceProto) org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; memory_ = 0L; virtualCores_ = 0; if (resourceValueMapBuilder_ == null) { resourceValueMap_ = java.util.Collections.emptyList(); } else { resourceValueMap_ = null; resourceValueMapBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto result) { if (resourceValueMapBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0)) { resourceValueMap_ = java.util.Collections.unmodifiableList(resourceValueMap_); bitField0_ = (bitField0_ & ~0x00000004); } result.resourceValueMap_ = resourceValueMap_; } else { result.resourceValueMap_ = resourceValueMapBuilder_.build(); } } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 
0x00000001) != 0)) { result.memory_ = memory_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.virtualCores_ = virtualCores_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) return this; if (other.hasMemory()) { setMemory(other.getMemory()); } if (other.hasVirtualCores()) { setVirtualCores(other.getVirtualCores()); } if (resourceValueMapBuilder_ == null) { if (!other.resourceValueMap_.isEmpty()) { if (resourceValueMap_.isEmpty()) { resourceValueMap_ = other.resourceValueMap_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureResourceValueMapIsMutable(); resourceValueMap_.addAll(other.resourceValueMap_); } onChanged(); } } else { if (!other.resourceValueMap_.isEmpty()) { if (resourceValueMapBuilder_.isEmpty()) { resourceValueMapBuilder_.dispose(); resourceValueMapBuilder_ = null; resourceValueMap_ = other.resourceValueMap_; bitField0_ = (bitField0_ & ~0x00000004); resourceValueMapBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getResourceValueMapFieldBuilder() : null; } else { resourceValueMapBuilder_.addAllMessages(other.resourceValueMap_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getResourceValueMapCount(); i++) { if (!getResourceValueMap(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { memory_ = input.readInt64(); bitField0_ |= 0x00000001; break; } // case 8 case 16: { virtualCores_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.PARSER, extensionRegistry); if (resourceValueMapBuilder_ == null) { ensureResourceValueMapIsMutable(); resourceValueMap_.add(m); } else { resourceValueMapBuilder_.addMessage(m); } break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private long memory_ ; /** * optional int64 memory = 1; * @return Whether the memory field is set. */ @java.lang.Override public boolean hasMemory() { return ((bitField0_ & 0x00000001) != 0); } /** * optional int64 memory = 1; * @return The memory. */ @java.lang.Override public long getMemory() { return memory_; } /** * optional int64 memory = 1; * @param value The memory to set. * @return This builder for chaining. */ public Builder setMemory(long value) { memory_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional int64 memory = 1; * @return This builder for chaining. */ public Builder clearMemory() { bitField0_ = (bitField0_ & ~0x00000001); memory_ = 0L; onChanged(); return this; } private int virtualCores_ ; /** * optional int32 virtual_cores = 2; * @return Whether the virtualCores field is set. */ @java.lang.Override public boolean hasVirtualCores() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int32 virtual_cores = 2; * @return The virtualCores. */ @java.lang.Override public int getVirtualCores() { return virtualCores_; } /** * optional int32 virtual_cores = 2; * @param value The virtualCores to set. * @return This builder for chaining. */ public Builder setVirtualCores(int value) { virtualCores_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional int32 virtual_cores = 2; * @return This builder for chaining. 
*/ public Builder clearVirtualCores() { bitField0_ = (bitField0_ & ~0x00000002); virtualCores_ = 0; onChanged(); return this; } private java.util.List resourceValueMap_ = java.util.Collections.emptyList(); private void ensureResourceValueMapIsMutable() { if (!((bitField0_ & 0x00000004) != 0)) { resourceValueMap_ = new java.util.ArrayList(resourceValueMap_); bitField0_ |= 0x00000004; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder> resourceValueMapBuilder_; /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ public java.util.List getResourceValueMapList() { if (resourceValueMapBuilder_ == null) { return java.util.Collections.unmodifiableList(resourceValueMap_); } else { return resourceValueMapBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ public int getResourceValueMapCount() { if (resourceValueMapBuilder_ == null) { return resourceValueMap_.size(); } else { return resourceValueMapBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto getResourceValueMap(int index) { if (resourceValueMapBuilder_ == null) { return resourceValueMap_.get(index); } else { return resourceValueMapBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ public Builder setResourceValueMap( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto value) { if (resourceValueMapBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResourceValueMapIsMutable(); resourceValueMap_.set(index, value); onChanged(); } else { resourceValueMapBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ public Builder setResourceValueMap( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder builderForValue) { if (resourceValueMapBuilder_ == null) { ensureResourceValueMapIsMutable(); resourceValueMap_.set(index, builderForValue.build()); onChanged(); } else { resourceValueMapBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ public Builder addResourceValueMap(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto value) { if (resourceValueMapBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResourceValueMapIsMutable(); resourceValueMap_.add(value); onChanged(); } else { resourceValueMapBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ public Builder addResourceValueMap( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto value) { if (resourceValueMapBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResourceValueMapIsMutable(); resourceValueMap_.add(index, value); onChanged(); } else { resourceValueMapBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ public 
Builder addResourceValueMap( org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder builderForValue) { if (resourceValueMapBuilder_ == null) { ensureResourceValueMapIsMutable(); resourceValueMap_.add(builderForValue.build()); onChanged(); } else { resourceValueMapBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ public Builder addResourceValueMap( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder builderForValue) { if (resourceValueMapBuilder_ == null) { ensureResourceValueMapIsMutable(); resourceValueMap_.add(index, builderForValue.build()); onChanged(); } else { resourceValueMapBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ public Builder addAllResourceValueMap( java.lang.Iterable values) { if (resourceValueMapBuilder_ == null) { ensureResourceValueMapIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, resourceValueMap_); onChanged(); } else { resourceValueMapBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ public Builder clearResourceValueMap() { if (resourceValueMapBuilder_ == null) { resourceValueMap_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { resourceValueMapBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ public Builder removeResourceValueMap(int index) { if (resourceValueMapBuilder_ == null) { ensureResourceValueMapIsMutable(); resourceValueMap_.remove(index); onChanged(); } else { resourceValueMapBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder getResourceValueMapBuilder( int index) { return getResourceValueMapFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder getResourceValueMapOrBuilder( int index) { if (resourceValueMapBuilder_ == null) { return resourceValueMap_.get(index); } else { return resourceValueMapBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ public java.util.List getResourceValueMapOrBuilderList() { if (resourceValueMapBuilder_ != null) { return resourceValueMapBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(resourceValueMap_); } } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder addResourceValueMapBuilder() { return getResourceValueMapFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder addResourceValueMapBuilder( int index) { return getResourceValueMapFieldBuilder().addBuilder( index, 
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder>
           getResourceValueMapBuilderList() {
        return getResourceValueMapFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder>
          getResourceValueMapFieldBuilder() {
        if (resourceValueMapBuilder_ == null) {
          resourceValueMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder>(
                  resourceValueMap_,
                  ((bitField0_ & 0x00000004) != 0),
                  getParentForChildren(),
                  isClean());
          resourceValueMap_ = null;
        }
        return resourceValueMapBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceProto>() {
      @java.lang.Override
      public ResourceProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
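  /*
   * Illustrative sketch (not part of the generated source): ResourceProto is
   * the wire form of a YARN resource, carrying the classic memory/vcores
   * pair plus the extensible resource_value_map. Every call below appears in
   * the generated API above; memory is conventionally expressed in MB:
   *
   *   YarnProtos.ResourceProto res =
   *       YarnProtos.ResourceProto.newBuilder()
   *           .setMemory(4096)       // optional int64 memory = 1
   *           .setVirtualCores(2)    // optional int32 virtual_cores = 2
   *           .build();
   *   YarnProtos.ResourceProto parsed =
   *       YarnProtos.ResourceProto.parseFrom(res.toByteArray());
   *
   * Entries added via addResourceValueMap must themselves be initialized,
   * since isInitialized() recurses into each ResourceInformationProto.
   */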
DEFAULT_INSTANCE;
}

}

public interface ResourceUtilizationProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceUtilizationProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /**
   * optional int32 pmem = 1;
   * @return Whether the pmem field is set.
   */
  boolean hasPmem();
  /**
   * optional int32 pmem = 1;
   * @return The pmem.
   */
  int getPmem();

  /**
   * optional int32 vmem = 2;
   * @return Whether the vmem field is set.
   */
  boolean hasVmem();
  /**
   * optional int32 vmem = 2;
   * @return The vmem.
   */
  int getVmem();

  /**
   * optional float cpu = 3;
   * @return Whether the cpu field is set.
   */
  boolean hasCpu();
  /**
   * optional float cpu = 3;
   * @return The cpu.
   */
  float getCpu();

  /**
   * repeated .hadoop.yarn.StringFloatMapProto customResources = 4;
   */
  java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto>
      getCustomResourcesList();
  /**
   * repeated .hadoop.yarn.StringFloatMapProto customResources = 4;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto getCustomResources(int index);
  /**
   * repeated .hadoop.yarn.StringFloatMapProto customResources = 4;
   */
  int getCustomResourcesCount();
  /**
   * repeated .hadoop.yarn.StringFloatMapProto customResources = 4;
   */
  java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder>
      getCustomResourcesOrBuilderList();
  /**
   * repeated .hadoop.yarn.StringFloatMapProto customResources = 4;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder getCustomResourcesOrBuilder(
      int index);
}
/**
 * Protobuf type {@code hadoop.yarn.ResourceUtilizationProto}
 */
public static final class ResourceUtilizationProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceUtilizationProto)
    ResourceUtilizationProtoOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ResourceUtilizationProto.newBuilder() to construct.
  private ResourceUtilizationProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ResourceUtilizationProto() {
    customResources_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new ResourceUtilizationProto();
  }

  @java.lang.Override
  public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceUtilizationProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceUtilizationProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder.class);
  }

  private int bitField0_;
  public static final int PMEM_FIELD_NUMBER = 1;
  private int pmem_ = 0;
  /**
   * optional int32 pmem = 1;
   * @return Whether the pmem field is set.
   */
  @java.lang.Override
  public boolean hasPmem() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * optional int32 pmem = 1;
   * @return The pmem.
*/ @java.lang.Override public int getPmem() { return pmem_; } public static final int VMEM_FIELD_NUMBER = 2; private int vmem_ = 0; /** * optional int32 vmem = 2; * @return Whether the vmem field is set. */ @java.lang.Override public boolean hasVmem() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int32 vmem = 2; * @return The vmem. */ @java.lang.Override public int getVmem() { return vmem_; } public static final int CPU_FIELD_NUMBER = 3; private float cpu_ = 0F; /** * optional float cpu = 3; * @return Whether the cpu field is set. */ @java.lang.Override public boolean hasCpu() { return ((bitField0_ & 0x00000004) != 0); } /** * optional float cpu = 3; * @return The cpu. */ @java.lang.Override public float getCpu() { return cpu_; } public static final int CUSTOMRESOURCES_FIELD_NUMBER = 4; @SuppressWarnings("serial") private java.util.List customResources_; /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ @java.lang.Override public java.util.List getCustomResourcesList() { return customResources_; } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ @java.lang.Override public java.util.List getCustomResourcesOrBuilderList() { return customResources_; } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ @java.lang.Override public int getCustomResourcesCount() { return customResources_.size(); } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto getCustomResources(int index) { return customResources_.get(index); } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder getCustomResourcesOrBuilder( int index) { return customResources_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getCustomResourcesCount(); i++) { if (!getCustomResources(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeInt32(1, pmem_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt32(2, vmem_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeFloat(3, cpu_); } for (int i = 0; i < customResources_.size(); i++) { output.writeMessage(4, customResources_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(1, pmem_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(2, vmem_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeFloatSize(3, cpu_); } for (int i = 0; i < customResources_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(4, customResources_.get(i)); } size += 
getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto) obj; if (hasPmem() != other.hasPmem()) return false; if (hasPmem()) { if (getPmem() != other.getPmem()) return false; } if (hasVmem() != other.hasVmem()) return false; if (hasVmem()) { if (getVmem() != other.getVmem()) return false; } if (hasCpu() != other.hasCpu()) return false; if (hasCpu()) { if (java.lang.Float.floatToIntBits(getCpu()) != java.lang.Float.floatToIntBits( other.getCpu())) return false; } if (!getCustomResourcesList() .equals(other.getCustomResourcesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasPmem()) { hash = (37 * hash) + PMEM_FIELD_NUMBER; hash = (53 * hash) + getPmem(); } if (hasVmem()) { hash = (37 * hash) + VMEM_FIELD_NUMBER; hash = (53 * hash) + getVmem(); } if (hasCpu()) { hash = (37 * hash) + CPU_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits( getCpu()); } if (getCustomResourcesCount() > 0) { hash = (37 * hash) + CUSTOMRESOURCES_FIELD_NUMBER; hash = (53 * hash) + getCustomResourcesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return 
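/*
 * Example (hand-written sketch, not protoc output): the parseFrom overloads
 * defined here round-trip a message through its wire format. toByteArray()
 * is inherited from the protobuf MessageLite base class, and parseFrom
 * throws InvalidProtocolBufferException on malformed input.
 *
 *   YarnProtos.ResourceUtilizationProto util =
 *       YarnProtos.ResourceUtilizationProto.newBuilder()
 *           .setPmem(2048)  // physical memory
 *           .setVmem(4096)  // virtual memory
 *           .setCpu(1.5f)
 *           .build();
 *   byte[] wire = util.toByteArray();
 *   YarnProtos.ResourceUtilizationProto parsed =
 *       YarnProtos.ResourceUtilizationProto.parseFrom(wire);
 *   assert parsed.equals(util);
 */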
PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ResourceUtilizationProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceUtilizationProto) org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceUtilizationProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceUtilizationProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; pmem_ = 0; vmem_ = 0; cpu_ = 0F; if (customResourcesBuilder_ == null) { customResources_ = java.util.Collections.emptyList(); } else { customResources_ = null; customResourcesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceUtilizationProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto result) { if (customResourcesBuilder_ == null) { if (((bitField0_ & 0x00000008) != 0)) { customResources_ = java.util.Collections.unmodifiableList(customResources_); bitField0_ = (bitField0_ & ~0x00000008); } result.customResources_ = customResources_; } else { result.customResources_ = customResourcesBuilder_.build(); } } private void 
buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.pmem_ = pmem_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.vmem_ = vmem_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.cpu_ = cpu_; to_bitField0_ |= 0x00000004; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance()) return this; if (other.hasPmem()) { setPmem(other.getPmem()); } if (other.hasVmem()) { setVmem(other.getVmem()); } if (other.hasCpu()) { setCpu(other.getCpu()); } if (customResourcesBuilder_ == null) { if (!other.customResources_.isEmpty()) { if (customResources_.isEmpty()) { customResources_ = other.customResources_; bitField0_ = (bitField0_ & ~0x00000008); } else { ensureCustomResourcesIsMutable(); customResources_.addAll(other.customResources_); } onChanged(); } } else { if (!other.customResources_.isEmpty()) { if (customResourcesBuilder_.isEmpty()) { customResourcesBuilder_.dispose(); customResourcesBuilder_ = null; customResources_ = other.customResources_; bitField0_ = (bitField0_ & ~0x00000008); customResourcesBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
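/*
 * Merge semantics (hand-written sketch, not protoc output): per the
 * mergeFrom implementation here, every optional scalar that is set on the
 * other message overwrites this builder's value, while repeated
 * customResources entries are appended rather than replaced.
 *
 *   YarnProtos.ResourceUtilizationProto merged =
 *       YarnProtos.ResourceUtilizationProto.newBuilder(base)  // copy of base
 *           .mergeFrom(update)  // update's set fields win; lists concatenate
 *           .build();
 */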
getCustomResourcesFieldBuilder() : null; } else { customResourcesBuilder_.addAllMessages(other.customResources_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getCustomResourcesCount(); i++) { if (!getCustomResources(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { pmem_ = input.readInt32(); bitField0_ |= 0x00000001; break; } // case 8 case 16: { vmem_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 29: { cpu_ = input.readFloat(); bitField0_ |= 0x00000004; break; } // case 29 case 34: { org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.PARSER, extensionRegistry); if (customResourcesBuilder_ == null) { ensureCustomResourcesIsMutable(); customResources_.add(m); } else { customResourcesBuilder_.addMessage(m); } break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int pmem_ ; /** * optional int32 pmem = 1; * @return Whether the pmem field is set. */ @java.lang.Override public boolean hasPmem() { return ((bitField0_ & 0x00000001) != 0); } /** * optional int32 pmem = 1; * @return The pmem. */ @java.lang.Override public int getPmem() { return pmem_; } /** * optional int32 pmem = 1; * @param value The pmem to set. * @return This builder for chaining. */ public Builder setPmem(int value) { pmem_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional int32 pmem = 1; * @return This builder for chaining. */ public Builder clearPmem() { bitField0_ = (bitField0_ & ~0x00000001); pmem_ = 0; onChanged(); return this; } private int vmem_ ; /** * optional int32 vmem = 2; * @return Whether the vmem field is set. */ @java.lang.Override public boolean hasVmem() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int32 vmem = 2; * @return The vmem. */ @java.lang.Override public int getVmem() { return vmem_; } /** * optional int32 vmem = 2; * @param value The vmem to set. * @return This builder for chaining. */ public Builder setVmem(int value) { vmem_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional int32 vmem = 2; * @return This builder for chaining. */ public Builder clearVmem() { bitField0_ = (bitField0_ & ~0x00000002); vmem_ = 0; onChanged(); return this; } private float cpu_ ; /** * optional float cpu = 3; * @return Whether the cpu field is set. */ @java.lang.Override public boolean hasCpu() { return ((bitField0_ & 0x00000004) != 0); } /** * optional float cpu = 3; * @return The cpu. */ @java.lang.Override public float getCpu() { return cpu_; } /** * optional float cpu = 3; * @param value The cpu to set. * @return This builder for chaining. 
*/ public Builder setCpu(float value) { cpu_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional float cpu = 3; * @return This builder for chaining. */ public Builder clearCpu() { bitField0_ = (bitField0_ & ~0x00000004); cpu_ = 0F; onChanged(); return this; } private java.util.List customResources_ = java.util.Collections.emptyList(); private void ensureCustomResourcesIsMutable() { if (!((bitField0_ & 0x00000008) != 0)) { customResources_ = new java.util.ArrayList(customResources_); bitField0_ |= 0x00000008; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder> customResourcesBuilder_; /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ public java.util.List getCustomResourcesList() { if (customResourcesBuilder_ == null) { return java.util.Collections.unmodifiableList(customResources_); } else { return customResourcesBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ public int getCustomResourcesCount() { if (customResourcesBuilder_ == null) { return customResources_.size(); } else { return customResourcesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto getCustomResources(int index) { if (customResourcesBuilder_ == null) { return customResources_.get(index); } else { return customResourcesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ public Builder setCustomResources( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto value) { if (customResourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCustomResourcesIsMutable(); customResources_.set(index, value); onChanged(); } else { customResourcesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ public Builder setCustomResources( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder builderForValue) { if (customResourcesBuilder_ == null) { ensureCustomResourcesIsMutable(); customResources_.set(index, builderForValue.build()); onChanged(); } else { customResourcesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ public Builder addCustomResources(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto value) { if (customResourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCustomResourcesIsMutable(); customResources_.add(value); onChanged(); } else { customResourcesBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ public Builder addCustomResources( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto value) { if (customResourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCustomResourcesIsMutable(); customResources_.add(index, value); onChanged(); } else { customResourcesBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 
4; */ public Builder addCustomResources( org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder builderForValue) { if (customResourcesBuilder_ == null) { ensureCustomResourcesIsMutable(); customResources_.add(builderForValue.build()); onChanged(); } else { customResourcesBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ public Builder addCustomResources( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder builderForValue) { if (customResourcesBuilder_ == null) { ensureCustomResourcesIsMutable(); customResources_.add(index, builderForValue.build()); onChanged(); } else { customResourcesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ public Builder addAllCustomResources( java.lang.Iterable values) { if (customResourcesBuilder_ == null) { ensureCustomResourcesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, customResources_); onChanged(); } else { customResourcesBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ public Builder clearCustomResources() { if (customResourcesBuilder_ == null) { customResources_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); } else { customResourcesBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ public Builder removeCustomResources(int index) { if (customResourcesBuilder_ == null) { ensureCustomResourcesIsMutable(); customResources_.remove(index); onChanged(); } else { customResourcesBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder getCustomResourcesBuilder( int index) { return getCustomResourcesFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder getCustomResourcesOrBuilder( int index) { if (customResourcesBuilder_ == null) { return customResources_.get(index); } else { return customResourcesBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ public java.util.List getCustomResourcesOrBuilderList() { if (customResourcesBuilder_ != null) { return customResourcesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(customResources_); } } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder addCustomResourcesBuilder() { return getCustomResourcesFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder addCustomResourcesBuilder( int index) { return getCustomResourcesFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.StringFloatMapProto customResources = 4; */ public java.util.List 
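/*
 * Example (hand-written sketch, not protoc output): addCustomResourcesBuilder()
 * returns a nested StringFloatMapProto.Builder that stays attached to this
 * parent builder, so it can be mutated in place before build(). The
 * setKey/setValue accessors are assumed from yarn_protos.proto and are not
 * shown in this excerpt.
 *
 *   YarnProtos.ResourceUtilizationProto.Builder b =
 *       YarnProtos.ResourceUtilizationProto.newBuilder();
 *   b.addCustomResourcesBuilder()
 *       .setKey("yarn.io/gpu")
 *       .setValue(0.75f);
 *   YarnProtos.ResourceUtilizationProto util = b.build();
 */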
getCustomResourcesBuilderList() {
  return getCustomResourcesFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder>
    getCustomResourcesFieldBuilder() {
  if (customResourcesBuilder_ == null) {
    customResourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder>(
            customResources_,
            ((bitField0_ & 0x00000008) != 0),
            getParentForChildren(),
            isClean());
    customResources_ = null;
  }
  return customResourcesBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
    final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceUtilizationProto)
}

// @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceUtilizationProto)
private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto();
}

public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceUtilizationProto>
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceUtilizationProto>() {
  @java.lang.Override
  public ResourceUtilizationProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};

public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceUtilizationProto> parser() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceUtilizationProto> getParserForType() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}

public interface ResourceOptionProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceOptionProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /**
   * optional .hadoop.yarn.ResourceProto resource = 1;
   * @return Whether the resource field is set.
   */
  boolean hasResource();
  /**
   * optional .hadoop.yarn.ResourceProto resource = 1;
   * @return The resource.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource();
  /**
   * optional .hadoop.yarn.ResourceProto resource = 1;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder();

  /**
   * optional int32 over_commit_timeout = 2;
   * @return Whether the overCommitTimeout field is set.
   */
  boolean hasOverCommitTimeout();
  /**
   * optional int32 over_commit_timeout = 2;
   * @return The overCommitTimeout.
   */
  int getOverCommitTimeout();
}
/**
 * Protobuf type {@code hadoop.yarn.ResourceOptionProto}
 */
public static final class ResourceOptionProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceOptionProto)
    ResourceOptionProtoOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ResourceOptionProto.newBuilder() to construct.
  private ResourceOptionProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ResourceOptionProto() {
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new ResourceOptionProto();
  }

  @java.lang.Override
  public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceOptionProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceOptionProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.Builder.class);
  }

  private int bitField0_;
  public static final int RESOURCE_FIELD_NUMBER = 1;
  private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
  /**
   * optional .hadoop.yarn.ResourceProto resource = 1;
   * @return Whether the resource field is set.
   */
  @java.lang.Override
  public boolean hasResource() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * optional .hadoop.yarn.ResourceProto resource = 1;
   * @return The resource.
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
    return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
  }
  /**
   * optional .hadoop.yarn.ResourceProto resource = 1;
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
    return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
  }

  public static final int OVER_COMMIT_TIMEOUT_FIELD_NUMBER = 2;
  private int overCommitTimeout_ = 0;
  /**
   * optional int32 over_commit_timeout = 2;
   * @return Whether the overCommitTimeout field is set.
*/ @java.lang.Override public boolean hasOverCommitTimeout() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int32 over_commit_timeout = 2; * @return The overCommitTimeout. */ @java.lang.Override public int getOverCommitTimeout() { return overCommitTimeout_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasResource()) { if (!getResource().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getResource()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt32(2, overCommitTimeout_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getResource()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(2, overCommitTimeout_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto) obj; if (hasResource() != other.hasResource()) return false; if (hasResource()) { if (!getResource() .equals(other.getResource())) return false; } if (hasOverCommitTimeout() != other.hasOverCommitTimeout()) return false; if (hasOverCommitTimeout()) { if (getOverCommitTimeout() != other.getOverCommitTimeout()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasResource()) { hash = (37 * hash) + RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getResource().hashCode(); } if (hasOverCommitTimeout()) { hash = (37 * hash) + OVER_COMMIT_TIMEOUT_FIELD_NUMBER; hash = (53 * hash) + getOverCommitTimeout(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
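/*
 * Example (hand-written sketch, not protoc output): newBuilder() starts from
 * the default instance, while toBuilder() copies an existing message, as
 * defined above.
 *
 *   YarnProtos.ResourceOptionProto option =
 *       YarnProtos.ResourceOptionProto.newBuilder()
 *           .setResource(resource)  // a previously built ResourceProto
 *           .setOverCommitTimeout(30)
 *           .build();
 *   YarnProtos.ResourceOptionProto extended =
 *       option.toBuilder()
 *           .setOverCommitTimeout(60)  // only this field changes
 *           .build();
 */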
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ResourceOptionProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceOptionProto) org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceOptionProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceOptionProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getResourceFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; resource_ = null; if (resourceBuilder_ != null) { resourceBuilder_.dispose(); resourceBuilder_ = null; } overCommitTimeout_ = 0; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceOptionProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.resource_ = resourceBuilder_ == null ? 
resource_ : resourceBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.overCommitTimeout_ = overCommitTimeout_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.getDefaultInstance()) return this; if (other.hasResource()) { mergeResource(other.getResource()); } if (other.hasOverCommitTimeout()) { setOverCommitTimeout(other.getOverCommitTimeout()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasResource()) { if (!getResource().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getResourceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 16: { overCommitTimeout_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourceBuilder_; /** * optional .hadoop.yarn.ResourceProto resource = 1; * @return Whether the resource field is set. */ public boolean hasResource() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ResourceProto resource = 1; * @return The resource. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() { if (resourceBuilder_ == null) { return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_; } else { return resourceBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceProto resource = 1; */ public Builder setResource(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (resourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } resource_ = value; } else { resourceBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resource = 1; */ public Builder setResource( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) { if (resourceBuilder_ == null) { resource_ = builderForValue.build(); } else { resourceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resource = 1; */ public Builder mergeResource(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (resourceBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && resource_ != null && resource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) { getResourceBuilder().mergeFrom(value); } else { resource_ = value; } } else { resourceBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resource = 1; */ public Builder clearResource() { bitField0_ = (bitField0_ & ~0x00000001); resource_ = null; if (resourceBuilder_ != null) { resourceBuilder_.dispose(); resourceBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resource = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourceBuilder() { bitField0_ |= 0x00000001; onChanged(); return getResourceFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceProto resource = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() { if (resourceBuilder_ != null) { return resourceBuilder_.getMessageOrBuilder(); } else { return resource_ == null ? 
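/*
 * Merge semantics (hand-written sketch, not protoc output): for the singular
 * resource field, mergeResource() folds an update into any ResourceProto
 * already set, field by field, whereas setResource() replaces it wholesale.
 *
 *   YarnProtos.ResourceOptionProto.Builder b = existing.toBuilder();
 *   b.mergeResource(partialResource);  // set fields of partialResource win;
 *                                      // unset fields keep current values
 */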
      private int overCommitTimeout_ ;
      /**
       * optional int32 over_commit_timeout = 2;
       * @return Whether the overCommitTimeout field is set.
       */
      @java.lang.Override
      public boolean hasOverCommitTimeout() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional int32 over_commit_timeout = 2;
       * @return The overCommitTimeout.
       */
      @java.lang.Override
      public int getOverCommitTimeout() {
        return overCommitTimeout_;
      }
      /**
       * optional int32 over_commit_timeout = 2;
       * @param value The overCommitTimeout to set.
       * @return This builder for chaining.
       */
      public Builder setOverCommitTimeout(int value) {
        overCommitTimeout_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * optional int32 over_commit_timeout = 2;
       * @return This builder for chaining.
       */
      public Builder clearOverCommitTimeout() {
        bitField0_ = (bitField0_ & ~0x00000002);
        overCommitTimeout_ = 0;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceOptionProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceOptionProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceOptionProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceOptionProto>() {
      @java.lang.Override
      public ResourceOptionProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceOptionProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceOptionProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
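  // Usage sketch (illustrative, not generated code): both fields of
  // ResourceOptionProto are optional, so an empty builder also builds cleanly.
  //
  //   YarnProtos.ResourceOptionProto opt = YarnProtos.ResourceOptionProto.newBuilder()
  //       .setResource(YarnProtos.ResourceProto.getDefaultInstance())
  //       .setOverCommitTimeout(30)
  //       .build();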
  public interface ResourceProfileEntryOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceProfileEntry)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * required string name = 1;
     * @return Whether the name field is set.
     */
    boolean hasName();
    /**
     * required string name = 1;
     * @return The name.
     */
    java.lang.String getName();
    /**
     * required string name = 1;
     * @return The bytes for name.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString getNameBytes();

    /**
     * required .hadoop.yarn.ResourceProto resources = 2;
     * @return Whether the resources field is set.
     */
    boolean hasResources();
    /**
     * required .hadoop.yarn.ResourceProto resources = 2;
     * @return The resources.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources();
    /**
     * required .hadoop.yarn.ResourceProto resources = 2;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ResourceProfileEntry}
   */
  public static final class ResourceProfileEntry extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceProfileEntry)
      ResourceProfileEntryOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use ResourceProfileEntry.newBuilder() to construct.
    private ResourceProfileEntry(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ResourceProfileEntry() {
      name_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ResourceProfileEntry();
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfileEntry_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfileEntry_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder.class);
    }

    private int bitField0_;
    public static final int NAME_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object name_ = "";
    /**
     * required string name = 1;
     * @return Whether the name field is set.
     */
    @java.lang.Override
    public boolean hasName() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * required string name = 1;
     * @return The name.
     */
    @java.lang.Override
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs =
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          name_ = s;
        }
        return s;
      }
    }
    /**
     * required string name = 1;
     * @return The bytes for name.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b =
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int RESOURCES_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resources_;
    /**
     * required .hadoop.yarn.ResourceProto resources = 2;
     * @return Whether the resources field is set.
     */
    @java.lang.Override
    public boolean hasResources() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * required .hadoop.yarn.ResourceProto resources = 2;
     * @return The resources.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources() {
      return resources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_;
    }
    /**
     * required .hadoop.yarn.ResourceProto resources = 2;
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder() {
      return resources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasResources()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getResources().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getResources());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getResources());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry other = (org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry) obj;

      if (hasName() != other.hasName()) return false;
      if (hasName()) {
        if (!getName()
            .equals(other.getName())) return false;
      }
      if (hasResources() != other.hasResources()) return false;
      if (hasResources()) {
        if (!getResources()
            .equals(other.getResources())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasName()) {
        hash = (37 * hash) + NAME_FIELD_NUMBER;
        hash = (53 * hash) + getName().hashCode();
      }
      if (hasResources()) {
        hash = (37 * hash) + RESOURCES_FIELD_NUMBER;
        hash = (53 * hash) + getResources().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
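    // Usage note (illustrative, not generated code): the
    // parseFrom(byte[]/ByteString/ByteBuffer) overloads expect exactly one
    // serialized message, whereas parseDelimitedFrom(InputStream) first reads a
    // varint length prefix, the counterpart of writeDelimitedTo(). A sketch,
    // assuming `bytes` holds one serialized entry:
    //
    //   YarnProtos.ResourceProfileEntry e = YarnProtos.ResourceProfileEntry.parseFrom(bytes);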
    @java.lang.Override
    public Builder newBuilderForType() {
      return newBuilder();
    }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ResourceProfileEntry}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceProfileEntry)
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfileEntry_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfileEntry_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getResourcesFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        name_ = "";
        resources_ = null;
        if (resourcesBuilder_ != null) {
          resourcesBuilder_.dispose();
          resourcesBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfileEntry_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.name_ = name_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.resources_ = resourcesBuilder_ == null
              ? resources_
              : resourcesBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.getDefaultInstance()) return this;
        if (other.hasName()) {
          name_ = other.name_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasResources()) {
          mergeResources(other.getResources());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasName()) {
          return false;
        }
        if (!hasResources()) {
          return false;
        }
        if (!getResources().isInitialized()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                name_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getResourcesFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object name_ = "";
      /**
       * required string name = 1;
       * @return Whether the name field is set.
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * required string name = 1;
       * @return The name.
       */
      public java.lang.String getName() {
        java.lang.Object ref = name_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            name_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * required string name = 1;
       * @return The bytes for name.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getNameBytes() {
        java.lang.Object ref = name_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b =
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          name_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * required string name = 1;
       * @param value The name to set.
       * @return This builder for chaining.
       */
      public Builder setName(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        name_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * required string name = 1;
       * @return This builder for chaining.
       */
      public Builder clearName() {
        name_ = getDefaultInstance().getName();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * required string name = 1;
       * @param value The bytes for name to set.
       * @return This builder for chaining.
       */
      public Builder setNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        name_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resources_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourcesBuilder_;
      /**
       * required .hadoop.yarn.ResourceProto resources = 2;
       * @return Whether the resources field is set.
       */
      public boolean hasResources() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * required .hadoop.yarn.ResourceProto resources = 2;
       * @return The resources.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources() {
        if (resourcesBuilder_ == null) {
          return resources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_;
        } else {
          return resourcesBuilder_.getMessage();
        }
      }
      /**
       * required .hadoop.yarn.ResourceProto resources = 2;
       */
      public Builder setResources(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          resources_ = value;
        } else {
          resourcesBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * required .hadoop.yarn.ResourceProto resources = 2;
       */
      public Builder setResources(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (resourcesBuilder_ == null) {
          resources_ = builderForValue.build();
        } else {
          resourcesBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * required .hadoop.yarn.ResourceProto resources = 2;
       */
      public Builder mergeResources(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourcesBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
              resources_ != null &&
              resources_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getResourcesBuilder().mergeFrom(value);
          } else {
            resources_ = value;
          }
        } else {
          resourcesBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * required .hadoop.yarn.ResourceProto resources = 2;
       */
      public Builder clearResources() {
        bitField0_ = (bitField0_ & ~0x00000002);
        resources_ = null;
        if (resourcesBuilder_ != null) {
          resourcesBuilder_.dispose();
          resourcesBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * required .hadoop.yarn.ResourceProto resources = 2;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourcesBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getResourcesFieldBuilder().getBuilder();
      }
      /**
       * required .hadoop.yarn.ResourceProto resources = 2;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder() {
        if (resourcesBuilder_ != null) {
          return resourcesBuilder_.getMessageOrBuilder();
        } else {
          return resources_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_;
        }
      }
      /**
       * required .hadoop.yarn.ResourceProto resources = 2;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>
          getResourcesFieldBuilder() {
        if (resourcesBuilder_ == null) {
          resourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getResources(),
                  getParentForChildren(),
                  isClean());
          resources_ = null;
        }
        return resourcesBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceProfileEntry)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceProfileEntry)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProfileEntry>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceProfileEntry>() {
      @java.lang.Override
      public ResourceProfileEntry parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProfileEntry> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProfileEntry> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
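  // Build sketch (illustrative, not generated code): `name` and `resources` are
  // both required, so build() throws an UninitializedMessageException if either
  // is unset; buildPartial() skips that check.
  //
  //   YarnProtos.ResourceProfileEntry entry = YarnProtos.ResourceProfileEntry.newBuilder()
  //       .setName("default")
  //       .setResources(YarnProtos.ResourceProto.getDefaultInstance())
  //       .build();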
  public interface ResourceProfilesProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceProfilesProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry>
        getResourceProfilesMapList();
    /**
     * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry getResourceProfilesMap(int index);
    /**
     * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
     */
    int getResourceProfilesMapCount();
    /**
     * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder>
        getResourceProfilesMapOrBuilderList();
    /**
     * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder getResourceProfilesMapOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.ResourceProfilesProto}
   */
  public static final class ResourceProfilesProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceProfilesProto)
      ResourceProfilesProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use ResourceProfilesProto.newBuilder() to construct.
    private ResourceProfilesProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ResourceProfilesProto() {
      resourceProfilesMap_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ResourceProfilesProto();
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfilesProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfilesProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder.class);
    }

    public static final int RESOURCE_PROFILES_MAP_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry> resourceProfilesMap_;
    /**
     * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry> getResourceProfilesMapList() {
      return resourceProfilesMap_;
    }
    /**
     * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder>
        getResourceProfilesMapOrBuilderList() {
      return resourceProfilesMap_;
    }
    /**
     * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
     */
    @java.lang.Override
    public int getResourceProfilesMapCount() {
      return resourceProfilesMap_.size();
    }
    /**
     * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry getResourceProfilesMap(int index) {
      return resourceProfilesMap_.get(index);
    }
    /**
     * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder getResourceProfilesMapOrBuilder(
        int index) {
      return resourceProfilesMap_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getResourceProfilesMapCount(); i++) {
        if (!getResourceProfilesMap(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < resourceProfilesMap_.size(); i++) {
        output.writeMessage(1, resourceProfilesMap_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < resourceProfilesMap_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, resourceProfilesMap_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto) obj;

      if (!getResourceProfilesMapList()
          .equals(other.getResourceProfilesMapList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getResourceProfilesMapCount() > 0) {
        hash = (37 * hash) + RESOURCE_PROFILES_MAP_FIELD_NUMBER;
        hash = (53 * hash) + getResourceProfilesMapList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() {
      return newBuilder();
    }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ResourceProfilesProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceProfilesProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfilesProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfilesProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (resourceProfilesMapBuilder_ == null) {
          resourceProfilesMap_ = java.util.Collections.emptyList();
        } else {
          resourceProfilesMap_ = null;
          resourceProfilesMapBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfilesProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto result) {
        if (resourceProfilesMapBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            resourceProfilesMap_ = java.util.Collections.unmodifiableList(resourceProfilesMap_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.resourceProfilesMap_ = resourceProfilesMap_;
        } else {
          result.resourceProfilesMap_ = resourceProfilesMapBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto result) {
        int from_bitField0_ = bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance()) return this;
        if (resourceProfilesMapBuilder_ == null) {
          if (!other.resourceProfilesMap_.isEmpty()) {
            if (resourceProfilesMap_.isEmpty()) {
              resourceProfilesMap_ = other.resourceProfilesMap_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureResourceProfilesMapIsMutable();
              resourceProfilesMap_.addAll(other.resourceProfilesMap_);
            }
            onChanged();
          }
        } else {
          if (!other.resourceProfilesMap_.isEmpty()) {
            if (resourceProfilesMapBuilder_.isEmpty()) {
              resourceProfilesMapBuilder_.dispose();
              resourceProfilesMapBuilder_ = null;
              resourceProfilesMap_ = other.resourceProfilesMap_;
              bitField0_ = (bitField0_ & ~0x00000001);
              resourceProfilesMapBuilder_ =
                  org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                      getResourceProfilesMapFieldBuilder() : null;
            } else {
              resourceProfilesMapBuilder_.addAllMessages(other.resourceProfilesMap_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getResourceProfilesMapCount(); i++) {
          if (!getResourceProfilesMap(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.PARSER,
                        extensionRegistry);
                if (resourceProfilesMapBuilder_ == null) {
                  ensureResourceProfilesMapIsMutable();
                  resourceProfilesMap_.add(m);
                } else {
                  resourceProfilesMapBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
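      // Note on the parse loop above: a protobuf tag packs the field number and
      // wire type as tag = (field_number << 3) | wire_type, so case 10 is field
      // 1 with wire type 2 (length-delimited), i.e. (1 << 3) | 2 = 10.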
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry> resourceProfilesMap_ =
          java.util.Collections.emptyList();
      private void ensureResourceProfilesMapIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          resourceProfilesMap_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry>(resourceProfilesMap_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder> resourceProfilesMapBuilder_;

      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry> getResourceProfilesMapList() {
        if (resourceProfilesMapBuilder_ == null) {
          return java.util.Collections.unmodifiableList(resourceProfilesMap_);
        } else {
          return resourceProfilesMapBuilder_.getMessageList();
        }
      }
      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public int getResourceProfilesMapCount() {
        if (resourceProfilesMapBuilder_ == null) {
          return resourceProfilesMap_.size();
        } else {
          return resourceProfilesMapBuilder_.getCount();
        }
      }
      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry getResourceProfilesMap(int index) {
        if (resourceProfilesMapBuilder_ == null) {
          return resourceProfilesMap_.get(index);
        } else {
          return resourceProfilesMapBuilder_.getMessage(index);
        }
      }
      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public Builder setResourceProfilesMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry value) {
        if (resourceProfilesMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceProfilesMapIsMutable();
          resourceProfilesMap_.set(index, value);
          onChanged();
        } else {
          resourceProfilesMapBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public Builder setResourceProfilesMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder builderForValue) {
        if (resourceProfilesMapBuilder_ == null) {
          ensureResourceProfilesMapIsMutable();
          resourceProfilesMap_.set(index, builderForValue.build());
          onChanged();
        } else {
          resourceProfilesMapBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public Builder addResourceProfilesMap(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry value) {
        if (resourceProfilesMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceProfilesMapIsMutable();
          resourceProfilesMap_.add(value);
          onChanged();
        } else {
          resourceProfilesMapBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public Builder addResourceProfilesMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry value) {
        if (resourceProfilesMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceProfilesMapIsMutable();
          resourceProfilesMap_.add(index, value);
          onChanged();
        } else {
          resourceProfilesMapBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public Builder addResourceProfilesMap(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder builderForValue) {
        if (resourceProfilesMapBuilder_ == null) {
          ensureResourceProfilesMapIsMutable();
          resourceProfilesMap_.add(builderForValue.build());
          onChanged();
        } else {
          resourceProfilesMapBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public Builder addResourceProfilesMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder builderForValue) {
        if (resourceProfilesMapBuilder_ == null) {
          ensureResourceProfilesMapIsMutable();
          resourceProfilesMap_.add(index, builderForValue.build());
          onChanged();
        } else {
          resourceProfilesMapBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public Builder addAllResourceProfilesMap(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry> values) {
        if (resourceProfilesMapBuilder_ == null) {
          ensureResourceProfilesMapIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, resourceProfilesMap_);
          onChanged();
        } else {
          resourceProfilesMapBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public Builder clearResourceProfilesMap() {
        if (resourceProfilesMapBuilder_ == null) {
          resourceProfilesMap_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          resourceProfilesMapBuilder_.clear();
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public Builder removeResourceProfilesMap(int index) {
        if (resourceProfilesMapBuilder_ == null) {
          ensureResourceProfilesMapIsMutable();
          resourceProfilesMap_.remove(index);
          onChanged();
        } else {
          resourceProfilesMapBuilder_.remove(index);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder getResourceProfilesMapBuilder(
          int index) {
        return getResourceProfilesMapFieldBuilder().getBuilder(index);
      }
      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder getResourceProfilesMapOrBuilder(
          int index) {
        if (resourceProfilesMapBuilder_ == null) {
          return resourceProfilesMap_.get(index);
        } else {
          return resourceProfilesMapBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder>
          getResourceProfilesMapOrBuilderList() {
        if (resourceProfilesMapBuilder_ != null) {
          return resourceProfilesMapBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(resourceProfilesMap_);
        }
      }
      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder addResourceProfilesMapBuilder() {
        return getResourceProfilesMapFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.getDefaultInstance());
      }
      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder addResourceProfilesMapBuilder(
          int index) {
        return getResourceProfilesMapFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.getDefaultInstance());
      }
      /**
       * repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder>
          getResourceProfilesMapBuilderList() {
        return getResourceProfilesMapFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder>
          getResourceProfilesMapFieldBuilder() {
        if (resourceProfilesMapBuilder_ == null) {
          resourceProfilesMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder>(
                  resourceProfilesMap_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          resourceProfilesMap_ = null;
        }
        return resourceProfilesMapBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceProfilesProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceProfilesProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProfilesProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceProfilesProto>() {
      @java.lang.Override
      public ResourceProfilesProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProfilesProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProfilesProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
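  // Usage sketch (illustrative, not generated code): resource_profiles_map is a
  // repeated entry message that emulates a map from profile name to
  // ResourceProto; duplicate names are not deduplicated at this layer.
  //
  //   YarnProtos.ResourceProfilesProto profiles = YarnProtos.ResourceProfilesProto.newBuilder()
  //       .addResourceProfilesMap(entry)  // `entry` as in the earlier sketch
  //       .build();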
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ResourceProfilesProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface NodeResourceMapProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeResourceMapProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.NodeIdProto node_id = 1; * @return Whether the nodeId field is set. */ boolean hasNodeId(); /** * optional .hadoop.yarn.NodeIdProto node_id = 1; * @return The nodeId. */ org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId(); /** * optional .hadoop.yarn.NodeIdProto node_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder(); /** * optional .hadoop.yarn.ResourceOptionProto resource_option = 2; * @return Whether the resourceOption field is set. */ boolean hasResourceOption(); /** * optional .hadoop.yarn.ResourceOptionProto resource_option = 2; * @return The resourceOption. */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto getResourceOption(); /** * optional .hadoop.yarn.ResourceOptionProto resource_option = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProtoOrBuilder getResourceOptionOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.NodeResourceMapProto} */ public static final class NodeResourceMapProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeResourceMapProto) NodeResourceMapProtoOrBuilder { private static final long serialVersionUID = 0L; // Use NodeResourceMapProto.newBuilder() to construct. 
  public interface NodeResourceMapProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeResourceMapProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
    /** optional .hadoop.yarn.NodeIdProto node_id = 1; @return Whether the nodeId field is set. */
    boolean hasNodeId();
    /** optional .hadoop.yarn.NodeIdProto node_id = 1; @return The nodeId. */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId();
    /** optional .hadoop.yarn.NodeIdProto node_id = 1; */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder();
    /** optional .hadoop.yarn.ResourceOptionProto resource_option = 2; @return Whether the resourceOption field is set. */
    boolean hasResourceOption();
    /** optional .hadoop.yarn.ResourceOptionProto resource_option = 2; @return The resourceOption. */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto getResourceOption();
    /** optional .hadoop.yarn.ResourceOptionProto resource_option = 2; */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProtoOrBuilder getResourceOptionOrBuilder();
  }
  /** Protobuf type {@code hadoop.yarn.NodeResourceMapProto} */
  public static final class NodeResourceMapProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeResourceMapProto)
      NodeResourceMapProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodeResourceMapProto.newBuilder() to construct.
    private NodeResourceMapProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); }
    private NodeResourceMapProto() { }
    @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new NodeResourceMapProto(); }
    @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeResourceMapProto_descriptor; }
    @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeResourceMapProto_fieldAccessorTable.ensureFieldAccessorsInitialized(org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.Builder.class); }
    private int bitField0_;
    public static final int NODE_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
    /** optional .hadoop.yarn.NodeIdProto node_id = 1; @return Whether the nodeId field is set. */
    @java.lang.Override public boolean hasNodeId() { return ((bitField0_ & 0x00000001) != 0); }
    /** optional .hadoop.yarn.NodeIdProto node_id = 1; @return The nodeId. */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; }
    /** optional .hadoop.yarn.NodeIdProto node_id = 1; */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; }
    public static final int RESOURCE_OPTION_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto resourceOption_;
    /** optional .hadoop.yarn.ResourceOptionProto resource_option = 2; @return Whether the resourceOption field is set. */
    @java.lang.Override public boolean hasResourceOption() { return ((bitField0_ & 0x00000002) != 0); }
    /** optional .hadoop.yarn.ResourceOptionProto resource_option = 2; @return The resourceOption. */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto getResourceOption() { return resourceOption_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.getDefaultInstance() : resourceOption_; }
    /** optional .hadoop.yarn.ResourceOptionProto resource_option = 2; */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProtoOrBuilder getResourceOptionOrBuilder() { return resourceOption_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.getDefaultInstance() : resourceOption_; }
    private byte memoizedIsInitialized = -1;
    @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasResourceOption()) { if (!getResourceOption().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; }
    @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getNodeId()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getResourceOption()); } getUnknownFields().writeTo(output); }
    @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeMessageSize(1, getNodeId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeMessageSize(2, getResourceOption()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; }
    @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto other = (org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto) obj; if (hasNodeId() != other.hasNodeId()) return false; if (hasNodeId()) { if (!getNodeId().equals(other.getNodeId())) return false; } if (hasResourceOption() != other.hasResourceOption()) return false; if (hasResourceOption()) { if (!getResourceOption().equals(other.getResourceOption())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; }
    @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasNodeId()) { hash = (37 * hash) + NODE_ID_FIELD_NUMBER; hash = (53 * hash) + getNodeId().hashCode(); } if (hasResourceOption()) { hash = (37 * hash) + RESOURCE_OPTION_FIELD_NUMBER; hash = (53 * hash) + getResourceOption().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseDelimitedFrom(java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input, extensionRegistry); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); }
    @java.lang.Override public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); }
    @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); }
    @java.lang.Override protected Builder newBuilderForType(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; }
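    // Usage sketch (illustrative, not generated code): newBuilder(prototype)
    // and toBuilder() both seed a builder from an existing message, so a
    // NodeResourceMapProto can be copied and modified without mutating the
    // original. 'original' and 'newOption' are assumed local variables.
    //
    //   YarnProtos.NodeResourceMapProto updated = original.toBuilder()
    //       .setResourceOption(newOption)
    //       .build();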
    /** Protobuf type {@code hadoop.yarn.NodeResourceMapProto} */
    public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeResourceMapProto)
        org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeResourceMapProto_descriptor; }
      @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeResourceMapProto_fieldAccessorTable.ensureFieldAccessorsInitialized(org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.Builder.class); }
      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.newBuilder()
      private Builder() { maybeForceBuilderInitialization(); }
      private Builder(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); }
      private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getNodeIdFieldBuilder(); getResourceOptionFieldBuilder(); } }
      @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; nodeId_ = null; if (nodeIdBuilder_ != null) { nodeIdBuilder_.dispose(); nodeIdBuilder_ = null; } resourceOption_ = null; if (resourceOptionBuilder_ != null) { resourceOptionBuilder_.dispose(); resourceOptionBuilder_ = null; } return this; }
      @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeResourceMapProto_descriptor; }
      @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.getDefaultInstance(); }
      @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto build() { org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; }
      @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; }
      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.nodeId_ = nodeIdBuilder_ == null ? nodeId_ : nodeIdBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.resourceOption_ = resourceOptionBuilder_ == null ? resourceOption_ : resourceOptionBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; }
      @java.lang.Override public Builder clone() { return super.clone(); }
      @java.lang.Override public Builder setField(org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); }
      @java.lang.Override public Builder clearField(org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); }
      @java.lang.Override public Builder clearOneof(org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); }
      @java.lang.Override public Builder setRepeatedField(org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); }
      @java.lang.Override public Builder addRepeatedField(org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); }
      @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto) other); } else { super.mergeFrom(other); return this; } }
      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.getDefaultInstance()) return this; if (other.hasNodeId()) { mergeNodeId(other.getNodeId()); } if (other.hasResourceOption()) { mergeResourceOption(other.getResourceOption()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; }
      @java.lang.Override public final boolean isInitialized() { if (hasResourceOption()) { if (!getResourceOption().isInitialized()) { return false; } } return true; }
      @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
        if (extensionRegistry == null) { throw new java.lang.NullPointerException(); }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0: done = true; break;
              case 10: { input.readMessage(getNodeIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10
              case 18: { input.readMessage(getResourceOptionFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18
              default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; } break; } // default: was an endgroup tag
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;
      private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_;
      /** optional .hadoop.yarn.NodeIdProto node_id = 1; @return Whether the nodeId field is set. */
      public boolean hasNodeId() { return ((bitField0_ & 0x00000001) != 0); }
      /** optional .hadoop.yarn.NodeIdProto node_id = 1; @return The nodeId. */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() { if (nodeIdBuilder_ == null) { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } else { return nodeIdBuilder_.getMessage(); } }
      /** optional .hadoop.yarn.NodeIdProto node_id = 1; */
      public Builder setNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) { if (nodeIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } nodeId_ = value; } else { nodeIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; }
      /** optional .hadoop.yarn.NodeIdProto node_id = 1; */
      public Builder setNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) { if (nodeIdBuilder_ == null) { nodeId_ = builderForValue.build(); } else { nodeIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; }
      /** optional .hadoop.yarn.NodeIdProto node_id = 1; */
      public Builder mergeNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) { if (nodeIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && nodeId_ != null && nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) { getNodeIdBuilder().mergeFrom(value); } else { nodeId_ = value; } } else { nodeIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; }
      /** optional .hadoop.yarn.NodeIdProto node_id = 1; */
      public Builder clearNodeId() { bitField0_ = (bitField0_ & ~0x00000001); nodeId_ = null; if (nodeIdBuilder_ != null) { nodeIdBuilder_.dispose(); nodeIdBuilder_ = null; } onChanged(); return this; }
      /** optional .hadoop.yarn.NodeIdProto node_id = 1; */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getNodeIdFieldBuilder().getBuilder(); }
      /** optional .hadoop.yarn.NodeIdProto node_id = 1; */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() { if (nodeIdBuilder_ != null) { return nodeIdBuilder_.getMessageOrBuilder(); } else { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } }
      /** optional .hadoop.yarn.NodeIdProto node_id = 1; */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> getNodeIdFieldBuilder() { if (nodeIdBuilder_ == null) { nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>(getNodeId(), getParentForChildren(), isClean()); nodeId_ = null; } return nodeIdBuilder_; }
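      // Note on the accessors above: the nested SingleFieldBuilderV3 is created
      // lazily. Until getNodeIdFieldBuilder() is first called, the builder keeps
      // the plain message in nodeId_; afterwards ownership moves to nodeIdBuilder_
      // and nodeId_ is nulled out, which is why every accessor branches on
      // nodeIdBuilder_ == null. The resource_option field below follows the
      // identical pattern.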
      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto resourceOption_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProtoOrBuilder> resourceOptionBuilder_;
      /** optional .hadoop.yarn.ResourceOptionProto resource_option = 2; @return Whether the resourceOption field is set. */
      public boolean hasResourceOption() { return ((bitField0_ & 0x00000002) != 0); }
      /** optional .hadoop.yarn.ResourceOptionProto resource_option = 2; @return The resourceOption. */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto getResourceOption() { if (resourceOptionBuilder_ == null) { return resourceOption_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.getDefaultInstance() : resourceOption_; } else { return resourceOptionBuilder_.getMessage(); } }
      /** optional .hadoop.yarn.ResourceOptionProto resource_option = 2; */
      public Builder setResourceOption(org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto value) { if (resourceOptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } resourceOption_ = value; } else { resourceOptionBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; }
      /** optional .hadoop.yarn.ResourceOptionProto resource_option = 2; */
      public Builder setResourceOption(org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.Builder builderForValue) { if (resourceOptionBuilder_ == null) { resourceOption_ = builderForValue.build(); } else { resourceOptionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; }
      /** optional .hadoop.yarn.ResourceOptionProto resource_option = 2; */
      public Builder mergeResourceOption(org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto value) { if (resourceOptionBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && resourceOption_ != null && resourceOption_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.getDefaultInstance()) { getResourceOptionBuilder().mergeFrom(value); } else { resourceOption_ = value; } } else { resourceOptionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; onChanged(); return this; }
      /** optional .hadoop.yarn.ResourceOptionProto resource_option = 2; */
      public Builder clearResourceOption() { bitField0_ = (bitField0_ & ~0x00000002); resourceOption_ = null; if (resourceOptionBuilder_ != null) { resourceOptionBuilder_.dispose(); resourceOptionBuilder_ = null; } onChanged(); return this; }
      /** optional .hadoop.yarn.ResourceOptionProto resource_option = 2; */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.Builder getResourceOptionBuilder() { bitField0_ |= 0x00000002; onChanged(); return getResourceOptionFieldBuilder().getBuilder(); }
      /** optional .hadoop.yarn.ResourceOptionProto resource_option = 2; */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProtoOrBuilder getResourceOptionOrBuilder() { if (resourceOptionBuilder_ != null) { return resourceOptionBuilder_.getMessageOrBuilder(); } else { return resourceOption_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.getDefaultInstance() : resourceOption_; } }
      /** optional .hadoop.yarn.ResourceOptionProto resource_option = 2; */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProtoOrBuilder> getResourceOptionFieldBuilder() { if (resourceOptionBuilder_ == null) { resourceOptionBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProtoOrBuilder>(getResourceOption(), getParentForChildren(), isClean()); resourceOption_ = null; } return resourceOptionBuilder_; }
      @java.lang.Override public final Builder setUnknownFields(final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); }
      @java.lang.Override public final Builder mergeUnknownFields(final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); }
      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeResourceMapProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeResourceMapProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto DEFAULT_INSTANCE;
    static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto(); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto getDefaultInstance() { return DEFAULT_INSTANCE; }
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeResourceMapProto> PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeResourceMapProto>() { @java.lang.Override public NodeResourceMapProto parsePartialFrom(org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } };
    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeResourceMapProto> parser() { return PARSER; }
    @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser<NodeResourceMapProto> getParserForType() { return PARSER; }
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; }
  }
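  // Usage sketch (illustrative only; setNodeId(Builder) is the generated
  // overload shown above, and NodeIdProto's host/port setters are assumed
  // from yarn_protos.proto):
  //
  //   YarnProtos.NodeResourceMapProto mapping =
  //       YarnProtos.NodeResourceMapProto.newBuilder()
  //           .setNodeId(YarnProtos.NodeIdProto.newBuilder()
  //               .setHost("worker-1")
  //               .setPort(45454))
  //           .build();
  //   boolean hasNode = mapping.hasNodeId();   // true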
  public interface PriorityProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.PriorityProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
    /** optional int32 priority = 1; @return Whether the priority field is set. */
    boolean hasPriority();
    /** optional int32 priority = 1; @return The priority. */
    int getPriority();
  }
  /** Protobuf type {@code hadoop.yarn.PriorityProto} */
  public static final class PriorityProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.PriorityProto)
      PriorityProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use PriorityProto.newBuilder() to construct.
    private PriorityProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); }
    private PriorityProto() { }
    @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new PriorityProto(); }
    @java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PriorityProto_descriptor; }
    @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PriorityProto_fieldAccessorTable.ensureFieldAccessorsInitialized(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder.class); }
    private int bitField0_;
    public static final int PRIORITY_FIELD_NUMBER = 1;
    private int priority_ = 0;
    /** optional int32 priority = 1; @return Whether the priority field is set. */
    @java.lang.Override public boolean hasPriority() { return ((bitField0_ & 0x00000001) != 0); }
    /** optional int32 priority = 1; @return The priority. */
    @java.lang.Override public int getPriority() { return priority_; }
    private byte memoizedIsInitialized = -1;
    @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; }
    @java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeInt32(1, priority_); } getUnknownFields().writeTo(output); }
    @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeInt32Size(1, priority_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; }
    @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto other = (org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto) obj; if (hasPriority() != other.hasPriority()) return false; if (hasPriority()) { if (getPriority() != other.getPriority()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; }
    @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasPriority()) { hash = (37 * hash) + PRIORITY_FIELD_NUMBER; hash = (53 * hash) + getPriority(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseDelimitedFrom(java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input, extensionRegistry); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); }
    @java.lang.Override public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); }
    @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); }
    @java.lang.Override protected Builder newBuilderForType(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; }
    /** Protobuf type {@code hadoop.yarn.PriorityProto} */
    public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.PriorityProto)
        org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PriorityProto_descriptor; }
      @java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PriorityProto_fieldAccessorTable.ensureFieldAccessorsInitialized(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder.class); }
      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.newBuilder()
      private Builder() { }
      private Builder(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); }
      @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; priority_ = 0; return this; }
      @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PriorityProto_descriptor; }
      @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance(); }
      @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto build() { org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; }
      @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto result = new org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; }
      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.priority_ = priority_; to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; }
      @java.lang.Override public Builder clone() { return super.clone(); }
      @java.lang.Override public Builder setField(org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); }
      @java.lang.Override public Builder clearField(org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); }
      @java.lang.Override public Builder clearOneof(org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); }
      @java.lang.Override public Builder setRepeatedField(org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); }
      @java.lang.Override public Builder addRepeatedField(org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); }
      @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto) other); } else { super.mergeFrom(other); return this; } }
      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) return this; if (other.hasPriority()) { setPriority(other.getPriority()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; }
      @java.lang.Override public final boolean isInitialized() { return true; }
      @java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
        if (extensionRegistry == null) { throw new java.lang.NullPointerException(); }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0: done = true; break;
              case 8: { priority_ = input.readInt32(); bitField0_ |= 0x00000001; break; } // case 8
              default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; } break; } // default: was an endgroup tag
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;
      private int priority_;
      /** optional int32 priority = 1; @return Whether the priority field is set. */
      @java.lang.Override public boolean hasPriority() { return ((bitField0_ & 0x00000001) != 0); }
      /** optional int32 priority = 1; @return The priority. */
      @java.lang.Override public int getPriority() { return priority_; }
      /** optional int32 priority = 1; @param value The priority to set. @return This builder for chaining. */
      public Builder setPriority(int value) { priority_ = value; bitField0_ |= 0x00000001; onChanged(); return this; }
      /** optional int32 priority = 1; @return This builder for chaining. */
      public Builder clearPriority() { bitField0_ = (bitField0_ & ~0x00000001); priority_ = 0; onChanged(); return this; }
      @java.lang.Override public final Builder setUnknownFields(final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); }
      @java.lang.Override public final Builder mergeUnknownFields(final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); }
      // @@protoc_insertion_point(builder_scope:hadoop.yarn.PriorityProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.PriorityProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto DEFAULT_INSTANCE;
    static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto(); }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getDefaultInstance() { return DEFAULT_INSTANCE; }
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<PriorityProto> PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<PriorityProto>() { @java.lang.Override public PriorityProto parsePartialFrom(org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } };
    public static org.apache.hadoop.thirdparty.protobuf.Parser<PriorityProto> parser() { return PARSER; }
    @java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser<PriorityProto> getParserForType() { return PARSER; }
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; }
  }
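  // Usage sketch (illustrative only): PriorityProto wraps a single int32, so a
  // full round trip through the generated API is just:
  //
  //   YarnProtos.PriorityProto p =
  //       YarnProtos.PriorityProto.newBuilder().setPriority(10).build();
  //   byte[] wire = p.toByteArray();
  //   assert YarnProtos.PriorityProto.parseFrom(wire).getPriority() == 10;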
*/ org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId(); /** * optional .hadoop.yarn.NodeIdProto nodeId = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder(); /** * optional string node_http_address = 3; * @return Whether the nodeHttpAddress field is set. */ boolean hasNodeHttpAddress(); /** * optional string node_http_address = 3; * @return The nodeHttpAddress. */ java.lang.String getNodeHttpAddress(); /** * optional string node_http_address = 3; * @return The bytes for nodeHttpAddress. */ org.apache.hadoop.thirdparty.protobuf.ByteString getNodeHttpAddressBytes(); /** * optional .hadoop.yarn.ResourceProto resource = 4; * @return Whether the resource field is set. */ boolean hasResource(); /** * optional .hadoop.yarn.ResourceProto resource = 4; * @return The resource. */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource(); /** * optional .hadoop.yarn.ResourceProto resource = 4; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder(); /** * optional .hadoop.yarn.PriorityProto priority = 5; * @return Whether the priority field is set. */ boolean hasPriority(); /** * optional .hadoop.yarn.PriorityProto priority = 5; * @return The priority. */ org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority(); /** * optional .hadoop.yarn.PriorityProto priority = 5; */ org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder(); /** * optional .hadoop.common.TokenProto container_token = 6; * @return Whether the containerToken field is set. */ boolean hasContainerToken(); /** * optional .hadoop.common.TokenProto container_token = 6; * @return The containerToken. */ org.apache.hadoop.security.proto.SecurityProtos.TokenProto getContainerToken(); /** * optional .hadoop.common.TokenProto container_token = 6; */ org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getContainerTokenOrBuilder(); /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 7 [default = GUARANTEED]; * @return Whether the executionType field is set. */ boolean hasExecutionType(); /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 7 [default = GUARANTEED]; * @return The executionType. */ org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType(); /** * optional int64 allocation_request_id = 8 [default = -1]; * @return Whether the allocationRequestId field is set. */ boolean hasAllocationRequestId(); /** * optional int64 allocation_request_id = 8 [default = -1]; * @return The allocationRequestId. */ long getAllocationRequestId(); /** * optional int32 version = 9 [default = 0]; * @return Whether the version field is set. */ boolean hasVersion(); /** * optional int32 version = 9 [default = 0]; * @return The version. */ int getVersion(); /** * repeated string allocation_tags = 10; * @return A list containing the allocationTags. */ java.util.List getAllocationTagsList(); /** * repeated string allocation_tags = 10; * @return The count of allocationTags. */ int getAllocationTagsCount(); /** * repeated string allocation_tags = 10; * @param index The index of the element to return. * @return The allocationTags at the given index. */ java.lang.String getAllocationTags(int index); /** * repeated string allocation_tags = 10; * @param index The index of the value to return. * @return The bytes of the allocationTags at the given index. 
*/ org.apache.hadoop.thirdparty.protobuf.ByteString getAllocationTagsBytes(int index); /** * optional string exposed_ports = 11; * @return Whether the exposedPorts field is set. */ boolean hasExposedPorts(); /** * optional string exposed_ports = 11; * @return The exposedPorts. */ java.lang.String getExposedPorts(); /** * optional string exposed_ports = 11; * @return The bytes for exposedPorts. */ org.apache.hadoop.thirdparty.protobuf.ByteString getExposedPortsBytes(); } /** * Protobuf type {@code hadoop.yarn.ContainerProto} */ public static final class ContainerProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerProto) ContainerProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ContainerProto.newBuilder() to construct. private ContainerProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ContainerProto() { nodeHttpAddress_ = ""; executionType_ = 1; allocationRequestId_ = -1L; allocationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; exposedPorts_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ContainerProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder.class); } private int bitField0_; public static final int ID_FIELD_NUMBER = 1; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto id_; /** * optional .hadoop.yarn.ContainerIdProto id = 1; * @return Whether the id field is set. */ @java.lang.Override public boolean hasId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerIdProto id = 1; * @return The id. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getId() { return id_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : id_; } /** * optional .hadoop.yarn.ContainerIdProto id = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getIdOrBuilder() { return id_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : id_; } public static final int NODEID_FIELD_NUMBER = 2; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_; /** * optional .hadoop.yarn.NodeIdProto nodeId = 2; * @return Whether the nodeId field is set. */ @java.lang.Override public boolean hasNodeId() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.NodeIdProto nodeId = 2; * @return The nodeId. 
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } public static final int NODE_HTTP_ADDRESS_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object nodeHttpAddress_ = ""; /** * optional string node_http_address = 3; * @return Whether the nodeHttpAddress field is set. */ @java.lang.Override public boolean hasNodeHttpAddress() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string node_http_address = 3; * @return The nodeHttpAddress. */ @java.lang.Override public java.lang.String getNodeHttpAddress() { java.lang.Object ref = nodeHttpAddress_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { nodeHttpAddress_ = s; } return s; } } /** * optional string node_http_address = 3; * @return The bytes for nodeHttpAddress. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNodeHttpAddressBytes() { java.lang.Object ref = nodeHttpAddress_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); nodeHttpAddress_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int RESOURCE_FIELD_NUMBER = 4; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_; /** * optional .hadoop.yarn.ResourceProto resource = 4; * @return Whether the resource field is set. */ @java.lang.Override public boolean hasResource() { return ((bitField0_ & 0x00000008) != 0); } /** * optional .hadoop.yarn.ResourceProto resource = 4; * @return The resource. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() { return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_; } /** * optional .hadoop.yarn.ResourceProto resource = 4; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() { return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_; } public static final int PRIORITY_FIELD_NUMBER = 5; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_; /** * optional .hadoop.yarn.PriorityProto priority = 5; * @return Whether the priority field is set. */ @java.lang.Override public boolean hasPriority() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.PriorityProto priority = 5; * @return The priority. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() { return priority_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } /** * optional .hadoop.yarn.PriorityProto priority = 5; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() { return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } public static final int CONTAINER_TOKEN_FIELD_NUMBER = 6; private org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto containerToken_; /** * optional .hadoop.common.TokenProto container_token = 6; * @return Whether the containerToken field is set. */ @java.lang.Override public boolean hasContainerToken() { return ((bitField0_ & 0x00000020) != 0); } /** * optional .hadoop.common.TokenProto container_token = 6; * @return The containerToken. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto getContainerToken() { return containerToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : containerToken_; } /** * optional .hadoop.common.TokenProto container_token = 6; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getContainerTokenOrBuilder() { return containerToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : containerToken_; } public static final int EXECUTION_TYPE_FIELD_NUMBER = 7; private int executionType_ = 1; /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 7 [default = GUARANTEED]; * @return Whether the executionType field is set. */ @java.lang.Override public boolean hasExecutionType() { return ((bitField0_ & 0x00000040) != 0); } /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 7 [default = GUARANTEED]; * @return The executionType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() { org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result; } public static final int ALLOCATION_REQUEST_ID_FIELD_NUMBER = 8; private long allocationRequestId_ = -1L; /** * optional int64 allocation_request_id = 8 [default = -1]; * @return Whether the allocationRequestId field is set. */ @java.lang.Override public boolean hasAllocationRequestId() { return ((bitField0_ & 0x00000080) != 0); } /** * optional int64 allocation_request_id = 8 [default = -1]; * @return The allocationRequestId. */ @java.lang.Override public long getAllocationRequestId() { return allocationRequestId_; } public static final int VERSION_FIELD_NUMBER = 9; private int version_ = 0; /** * optional int32 version = 9 [default = 0]; * @return Whether the version field is set. */ @java.lang.Override public boolean hasVersion() { return ((bitField0_ & 0x00000100) != 0); } /** * optional int32 version = 9 [default = 0]; * @return The version. */ @java.lang.Override public int getVersion() { return version_; } public static final int ALLOCATION_TAGS_FIELD_NUMBER = 10; @SuppressWarnings("serial") private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList allocationTags_; /** * repeated string allocation_tags = 10; * @return A list containing the allocationTags. 
*/ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getAllocationTagsList() { return allocationTags_; } /** * repeated string allocation_tags = 10; * @return The count of allocationTags. */ public int getAllocationTagsCount() { return allocationTags_.size(); } /** * repeated string allocation_tags = 10; * @param index The index of the element to return. * @return The allocationTags at the given index. */ public java.lang.String getAllocationTags(int index) { return allocationTags_.get(index); } /** * repeated string allocation_tags = 10; * @param index The index of the value to return. * @return The bytes of the allocationTags at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getAllocationTagsBytes(int index) { return allocationTags_.getByteString(index); } public static final int EXPOSED_PORTS_FIELD_NUMBER = 11; @SuppressWarnings("serial") private volatile java.lang.Object exposedPorts_ = ""; /** * optional string exposed_ports = 11; * @return Whether the exposedPorts field is set. */ @java.lang.Override public boolean hasExposedPorts() { return ((bitField0_ & 0x00000200) != 0); } /** * optional string exposed_ports = 11; * @return The exposedPorts. */ @java.lang.Override public java.lang.String getExposedPorts() { java.lang.Object ref = exposedPorts_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { exposedPorts_ = s; } return s; } } /** * optional string exposed_ports = 11; * @return The bytes for exposedPorts. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getExposedPortsBytes() { java.lang.Object ref = exposedPorts_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); exposedPorts_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasResource()) { if (!getResource().isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasContainerToken()) { if (!getContainerToken().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getId()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getNodeId()); } if (((bitField0_ & 0x00000004) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, nodeHttpAddress_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeMessage(4, getResource()); } if (((bitField0_ & 0x00000010) != 0)) { output.writeMessage(5, getPriority()); } if (((bitField0_ & 0x00000020) != 0)) { output.writeMessage(6, getContainerToken()); } if (((bitField0_ & 0x00000040) != 0)) { output.writeEnum(7, executionType_); } if (((bitField0_ & 0x00000080) != 0)) { 
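      // Presence bit 0x80 guards field 8 (allocation_request_id): proto2
      // optional fields are emitted on the wire only when explicitly set,
      // so an unset field costs zero bytes and parses back as its default.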
output.writeInt64(8, allocationRequestId_); } if (((bitField0_ & 0x00000100) != 0)) { output.writeInt32(9, version_); } for (int i = 0; i < allocationTags_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 10, allocationTags_.getRaw(i)); } if (((bitField0_ & 0x00000200) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 11, exposedPorts_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getNodeId()); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, nodeHttpAddress_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(4, getResource()); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(5, getPriority()); } if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(6, getContainerToken()); } if (((bitField0_ & 0x00000040) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(7, executionType_); } if (((bitField0_ & 0x00000080) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(8, allocationRequestId_); } if (((bitField0_ & 0x00000100) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(9, version_); } { int dataSize = 0; for (int i = 0; i < allocationTags_.size(); i++) { dataSize += computeStringSizeNoTag(allocationTags_.getRaw(i)); } size += dataSize; size += 1 * getAllocationTagsList().size(); } if (((bitField0_ & 0x00000200) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(11, exposedPorts_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto) obj; if (hasId() != other.hasId()) return false; if (hasId()) { if (!getId() .equals(other.getId())) return false; } if (hasNodeId() != other.hasNodeId()) return false; if (hasNodeId()) { if (!getNodeId() .equals(other.getNodeId())) return false; } if (hasNodeHttpAddress() != other.hasNodeHttpAddress()) return false; if (hasNodeHttpAddress()) { if (!getNodeHttpAddress() .equals(other.getNodeHttpAddress())) return false; } if (hasResource() != other.hasResource()) return false; if (hasResource()) { if (!getResource() .equals(other.getResource())) return false; } if (hasPriority() != other.hasPriority()) return false; if (hasPriority()) { if (!getPriority() .equals(other.getPriority())) return false; } if (hasContainerToken() != other.hasContainerToken()) return false; if (hasContainerToken()) { if (!getContainerToken() .equals(other.getContainerToken())) return false; } if 
(hasExecutionType() != other.hasExecutionType()) return false; if (hasExecutionType()) { if (executionType_ != other.executionType_) return false; } if (hasAllocationRequestId() != other.hasAllocationRequestId()) return false; if (hasAllocationRequestId()) { if (getAllocationRequestId() != other.getAllocationRequestId()) return false; } if (hasVersion() != other.hasVersion()) return false; if (hasVersion()) { if (getVersion() != other.getVersion()) return false; } if (!getAllocationTagsList() .equals(other.getAllocationTagsList())) return false; if (hasExposedPorts() != other.hasExposedPorts()) return false; if (hasExposedPorts()) { if (!getExposedPorts() .equals(other.getExposedPorts())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasId()) { hash = (37 * hash) + ID_FIELD_NUMBER; hash = (53 * hash) + getId().hashCode(); } if (hasNodeId()) { hash = (37 * hash) + NODEID_FIELD_NUMBER; hash = (53 * hash) + getNodeId().hashCode(); } if (hasNodeHttpAddress()) { hash = (37 * hash) + NODE_HTTP_ADDRESS_FIELD_NUMBER; hash = (53 * hash) + getNodeHttpAddress().hashCode(); } if (hasResource()) { hash = (37 * hash) + RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getResource().hashCode(); } if (hasPriority()) { hash = (37 * hash) + PRIORITY_FIELD_NUMBER; hash = (53 * hash) + getPriority().hashCode(); } if (hasContainerToken()) { hash = (37 * hash) + CONTAINER_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getContainerToken().hashCode(); } if (hasExecutionType()) { hash = (37 * hash) + EXECUTION_TYPE_FIELD_NUMBER; hash = (53 * hash) + executionType_; } if (hasAllocationRequestId()) { hash = (37 * hash) + ALLOCATION_REQUEST_ID_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getAllocationRequestId()); } if (hasVersion()) { hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + getVersion(); } if (getAllocationTagsCount() > 0) { hash = (37 * hash) + ALLOCATION_TAGS_FIELD_NUMBER; hash = (53 * hash) + getAllocationTagsList().hashCode(); } if (hasExposedPorts()) { hash = (37 * hash) + EXPOSED_PORTS_FIELD_NUMBER; hash = (53 * hash) + getExposedPorts().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code hadoop.yarn.ContainerProto}
   */
  public static final class Builder extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerProto)
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder {
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder.class);
    }

    // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
        getIdFieldBuilder();
        getNodeIdFieldBuilder();
        getResourceFieldBuilder();
        getPriorityFieldBuilder();
        getContainerTokenFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      id_ = null;
      if (idBuilder_ != null) {
        idBuilder_.dispose();
        idBuilder_ = null;
      }
      nodeId_ = null;
      if (nodeIdBuilder_ != null) {
        nodeIdBuilder_.dispose();
        nodeIdBuilder_ = null;
      }
      nodeHttpAddress_ = "";
      resource_ = null;
      if (resourceBuilder_ != null) {
        resourceBuilder_.dispose();
        resourceBuilder_ = null;
      }
      priority_ = null;
      if (priorityBuilder_ != null) {
        priorityBuilder_.dispose();
        priorityBuilder_ = null;
      }
      containerToken_ = null;
      if (containerTokenBuilder_ != null) {
        containerTokenBuilder_.dispose();
        containerTokenBuilder_ = null;
      }
      executionType_ = 1;
      allocationRequestId_ = -1L;
      version_ = 0;
      allocationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000200);
      exposedPorts_ = "";
      return this;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerProto_descriptor;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getDefaultInstanceForType() {
      return org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance();
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto build() {
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto buildPartial() {
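      // Unlike build(), buildPartial() performs no isInitialized() check, so
      // it can return a message whose nested resource/container_token are
      // still missing their required fields.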
org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto result) { if (((bitField0_ & 0x00000200) != 0)) { allocationTags_ = allocationTags_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000200); } result.allocationTags_ = allocationTags_; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.id_ = idBuilder_ == null ? id_ : idBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.nodeId_ = nodeIdBuilder_ == null ? nodeId_ : nodeIdBuilder_.build(); to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.nodeHttpAddress_ = nodeHttpAddress_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.resource_ = resourceBuilder_ == null ? resource_ : resourceBuilder_.build(); to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.priority_ = priorityBuilder_ == null ? priority_ : priorityBuilder_.build(); to_bitField0_ |= 0x00000010; } if (((from_bitField0_ & 0x00000020) != 0)) { result.containerToken_ = containerTokenBuilder_ == null ? containerToken_ : containerTokenBuilder_.build(); to_bitField0_ |= 0x00000020; } if (((from_bitField0_ & 0x00000040) != 0)) { result.executionType_ = executionType_; to_bitField0_ |= 0x00000040; } if (((from_bitField0_ & 0x00000080) != 0)) { result.allocationRequestId_ = allocationRequestId_; to_bitField0_ |= 0x00000080; } if (((from_bitField0_ & 0x00000100) != 0)) { result.version_ = version_; to_bitField0_ |= 0x00000100; } if (((from_bitField0_ & 0x00000400) != 0)) { result.exposedPorts_ = exposedPorts_; to_bitField0_ |= 0x00000200; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto)other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance()) return this; if (other.hasId()) { mergeId(other.getId()); } if (other.hasNodeId()) { mergeNodeId(other.getNodeId()); } if (other.hasNodeHttpAddress()) { nodeHttpAddress_ = other.nodeHttpAddress_; bitField0_ |= 0x00000004; onChanged(); } if (other.hasResource()) { mergeResource(other.getResource()); } if (other.hasPriority()) { mergePriority(other.getPriority()); } if (other.hasContainerToken()) { mergeContainerToken(other.getContainerToken()); } if (other.hasExecutionType()) { setExecutionType(other.getExecutionType()); } if (other.hasAllocationRequestId()) { setAllocationRequestId(other.getAllocationRequestId()); } if (other.hasVersion()) { setVersion(other.getVersion()); } if (!other.allocationTags_.isEmpty()) { if (allocationTags_.isEmpty()) { allocationTags_ = other.allocationTags_; bitField0_ = (bitField0_ & ~0x00000200); } else { ensureAllocationTagsIsMutable(); allocationTags_.addAll(other.allocationTags_); } onChanged(); } if (other.hasExposedPorts()) { exposedPorts_ = other.exposedPorts_; bitField0_ |= 0x00000400; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasResource()) { if (!getResource().isInitialized()) { return false; } } if (hasContainerToken()) { if (!getContainerToken().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getNodeIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { nodeHttpAddress_ = input.readBytes(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { input.readMessage( getResourceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000008; break; } // case 34 case 42: { input.readMessage( getPriorityFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000010; break; } // case 42 case 50: { input.readMessage( getContainerTokenFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000020; break; } // case 50 case 56: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(7, tmpRaw); } else { executionType_ = tmpRaw; bitField0_ |= 0x00000040; } break; } // case 56 case 64: { allocationRequestId_ = input.readInt64(); bitField0_ |= 0x00000080; break; } // case 64 case 72: { version_ = input.readInt32(); bitField0_ |= 0x00000100; break; } // case 72 case 82: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); ensureAllocationTagsIsMutable(); allocationTags_.add(bs); break; } // case 82 case 90: { exposedPorts_ = input.readBytes(); bitField0_ |= 0x00000400; break; } // case 90 default: { if 
(!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto id_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> idBuilder_; /** * optional .hadoop.yarn.ContainerIdProto id = 1; * @return Whether the id field is set. */ public boolean hasId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerIdProto id = 1; * @return The id. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getId() { if (idBuilder_ == null) { return id_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : id_; } else { return idBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ContainerIdProto id = 1; */ public Builder setId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (idBuilder_ == null) { if (value == null) { throw new NullPointerException(); } id_ = value; } else { idBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto id = 1; */ public Builder setId( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (idBuilder_ == null) { id_ = builderForValue.build(); } else { idBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto id = 1; */ public Builder mergeId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (idBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && id_ != null && id_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) { getIdBuilder().mergeFrom(value); } else { id_ = value; } } else { idBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto id = 1; */ public Builder clearId() { bitField0_ = (bitField0_ & ~0x00000001); id_ = null; if (idBuilder_ != null) { idBuilder_.dispose(); idBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto id = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ContainerIdProto id = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getIdOrBuilder() { if (idBuilder_ != null) { return idBuilder_.getMessageOrBuilder(); } else { return id_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : id_; } } /** * optional .hadoop.yarn.ContainerIdProto id = 1; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getIdFieldBuilder() { if (idBuilder_ == null) { idBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( getId(), getParentForChildren(), isClean()); id_ = null; } return idBuilder_; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_; /** * optional .hadoop.yarn.NodeIdProto nodeId = 2; * @return Whether the nodeId field is set. */ public boolean hasNodeId() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.NodeIdProto nodeId = 2; * @return The nodeId. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() { if (nodeIdBuilder_ == null) { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } else { return nodeIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.NodeIdProto nodeId = 2; */ public Builder setNodeId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) { if (nodeIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } nodeId_ = value; } else { nodeIdBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 2; */ public Builder setNodeId( org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) { if (nodeIdBuilder_ == null) { nodeId_ = builderForValue.build(); } else { nodeIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 2; */ public Builder mergeNodeId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) { if (nodeIdBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && nodeId_ != null && nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) { getNodeIdBuilder().mergeFrom(value); } else { nodeId_ = value; } } else { nodeIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 2; */ public Builder clearNodeId() { bitField0_ = (bitField0_ & ~0x00000002); nodeId_ = null; if (nodeIdBuilder_ != null) { nodeIdBuilder_.dispose(); nodeIdBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() { bitField0_ |= 0x00000002; onChanged(); return getNodeIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.NodeIdProto nodeId = 2; */ public 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() { if (nodeIdBuilder_ != null) { return nodeIdBuilder_.getMessageOrBuilder(); } else { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } } /** * optional .hadoop.yarn.NodeIdProto nodeId = 2; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> getNodeIdFieldBuilder() { if (nodeIdBuilder_ == null) { nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>( getNodeId(), getParentForChildren(), isClean()); nodeId_ = null; } return nodeIdBuilder_; } private java.lang.Object nodeHttpAddress_ = ""; /** * optional string node_http_address = 3; * @return Whether the nodeHttpAddress field is set. */ public boolean hasNodeHttpAddress() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string node_http_address = 3; * @return The nodeHttpAddress. */ public java.lang.String getNodeHttpAddress() { java.lang.Object ref = nodeHttpAddress_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { nodeHttpAddress_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string node_http_address = 3; * @return The bytes for nodeHttpAddress. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNodeHttpAddressBytes() { java.lang.Object ref = nodeHttpAddress_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); nodeHttpAddress_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string node_http_address = 3; * @param value The nodeHttpAddress to set. * @return This builder for chaining. */ public Builder setNodeHttpAddress( java.lang.String value) { if (value == null) { throw new NullPointerException(); } nodeHttpAddress_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional string node_http_address = 3; * @return This builder for chaining. */ public Builder clearNodeHttpAddress() { nodeHttpAddress_ = getDefaultInstance().getNodeHttpAddress(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * optional string node_http_address = 3; * @param value The bytes for nodeHttpAddress to set. * @return This builder for chaining. 
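     * <p>Illustrative chaining sketch (hypothetical values; every setter on
     * this Builder returns {@code this}):
     * <pre>
     * builder.setNodeHttpAddress("host:8042")
     *        .setVersion(1);
     * </pre>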
*/ public Builder setNodeHttpAddressBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } nodeHttpAddress_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourceBuilder_; /** * optional .hadoop.yarn.ResourceProto resource = 4; * @return Whether the resource field is set. */ public boolean hasResource() { return ((bitField0_ & 0x00000008) != 0); } /** * optional .hadoop.yarn.ResourceProto resource = 4; * @return The resource. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() { if (resourceBuilder_ == null) { return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_; } else { return resourceBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceProto resource = 4; */ public Builder setResource(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (resourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } resource_ = value; } else { resourceBuilder_.setMessage(value); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resource = 4; */ public Builder setResource( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) { if (resourceBuilder_ == null) { resource_ = builderForValue.build(); } else { resourceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resource = 4; */ public Builder mergeResource(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (resourceBuilder_ == null) { if (((bitField0_ & 0x00000008) != 0) && resource_ != null && resource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) { getResourceBuilder().mergeFrom(value); } else { resource_ = value; } } else { resourceBuilder_.mergeFrom(value); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resource = 4; */ public Builder clearResource() { bitField0_ = (bitField0_ & ~0x00000008); resource_ = null; if (resourceBuilder_ != null) { resourceBuilder_.dispose(); resourceBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resource = 4; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourceBuilder() { bitField0_ |= 0x00000008; onChanged(); return getResourceFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceProto resource = 4; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() { if (resourceBuilder_ != null) { return resourceBuilder_.getMessageOrBuilder(); } else { return resource_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_; } } /** * optional .hadoop.yarn.ResourceProto resource = 4; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getResourceFieldBuilder() { if (resourceBuilder_ == null) { resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>( getResource(), getParentForChildren(), isClean()); resource_ = null; } return resourceBuilder_; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_; /** * optional .hadoop.yarn.PriorityProto priority = 5; * @return Whether the priority field is set. */ public boolean hasPriority() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.PriorityProto priority = 5; * @return The priority. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() { if (priorityBuilder_ == null) { return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } else { return priorityBuilder_.getMessage(); } } /** * optional .hadoop.yarn.PriorityProto priority = 5; */ public Builder setPriority(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) { if (priorityBuilder_ == null) { if (value == null) { throw new NullPointerException(); } priority_ = value; } else { priorityBuilder_.setMessage(value); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 5; */ public Builder setPriority( org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) { if (priorityBuilder_ == null) { priority_ = builderForValue.build(); } else { priorityBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 5; */ public Builder mergePriority(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) { if (priorityBuilder_ == null) { if (((bitField0_ & 0x00000010) != 0) && priority_ != null && priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) { getPriorityBuilder().mergeFrom(value); } else { priority_ = value; } } else { priorityBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 5; */ public Builder clearPriority() { bitField0_ = (bitField0_ & ~0x00000010); priority_ = null; if (priorityBuilder_ != null) { priorityBuilder_.dispose(); priorityBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 5; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() { bitField0_ |= 0x00000010; onChanged(); 
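      // Handing out the nested builder marks priority as present and switches
      // this field from the plain-message representation to the lazily
      // created SingleFieldBuilderV3 below.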
return getPriorityFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.PriorityProto priority = 5; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() { if (priorityBuilder_ != null) { return priorityBuilder_.getMessageOrBuilder(); } else { return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } } /** * optional .hadoop.yarn.PriorityProto priority = 5; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> getPriorityFieldBuilder() { if (priorityBuilder_ == null) { priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>( getPriority(), getParentForChildren(), isClean()); priority_ = null; } return priorityBuilder_; } private org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto containerToken_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> containerTokenBuilder_; /** * optional .hadoop.common.TokenProto container_token = 6; * @return Whether the containerToken field is set. */ public boolean hasContainerToken() { return ((bitField0_ & 0x00000020) != 0); } /** * optional .hadoop.common.TokenProto container_token = 6; * @return The containerToken. */ public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto getContainerToken() { if (containerTokenBuilder_ == null) { return containerToken_ == null ? 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : containerToken_; } else { return containerTokenBuilder_.getMessage(); } } /** * optional .hadoop.common.TokenProto container_token = 6; */ public Builder setContainerToken(org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (containerTokenBuilder_ == null) { if (value == null) { throw new NullPointerException(); } containerToken_ = value; } else { containerTokenBuilder_.setMessage(value); } bitField0_ |= 0x00000020; onChanged(); return this; } /** * optional .hadoop.common.TokenProto container_token = 6; */ public Builder setContainerToken( org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) { if (containerTokenBuilder_ == null) { containerToken_ = builderForValue.build(); } else { containerTokenBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000020; onChanged(); return this; } /** * optional .hadoop.common.TokenProto container_token = 6; */ public Builder mergeContainerToken(org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (containerTokenBuilder_ == null) { if (((bitField0_ & 0x00000020) != 0) && containerToken_ != null && containerToken_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) { getContainerTokenBuilder().mergeFrom(value); } else { containerToken_ = value; } } else { containerTokenBuilder_.mergeFrom(value); } bitField0_ |= 0x00000020; onChanged(); return this; } /** * optional .hadoop.common.TokenProto container_token = 6; */ public Builder clearContainerToken() { bitField0_ = (bitField0_ & ~0x00000020); containerToken_ = null; if (containerTokenBuilder_ != null) { containerTokenBuilder_.dispose(); containerTokenBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.common.TokenProto container_token = 6; */ public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getContainerTokenBuilder() { bitField0_ |= 0x00000020; onChanged(); return getContainerTokenFieldBuilder().getBuilder(); } /** * optional .hadoop.common.TokenProto container_token = 6; */ public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getContainerTokenOrBuilder() { if (containerTokenBuilder_ != null) { return containerTokenBuilder_.getMessageOrBuilder(); } else { return containerToken_ == null ? 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : containerToken_; } } /** * optional .hadoop.common.TokenProto container_token = 6; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> getContainerTokenFieldBuilder() { if (containerTokenBuilder_ == null) { containerTokenBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>( getContainerToken(), getParentForChildren(), isClean()); containerToken_ = null; } return containerTokenBuilder_; } private int executionType_ = 1; /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 7 [default = GUARANTEED]; * @return Whether the executionType field is set. */ @java.lang.Override public boolean hasExecutionType() { return ((bitField0_ & 0x00000040) != 0); } /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 7 [default = GUARANTEED]; * @return The executionType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() { org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result; } /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 7 [default = GUARANTEED]; * @param value The executionType to set. * @return This builder for chaining. */ public Builder setExecutionType(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000040; executionType_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 7 [default = GUARANTEED]; * @return This builder for chaining. */ public Builder clearExecutionType() { bitField0_ = (bitField0_ & ~0x00000040); executionType_ = 1; onChanged(); return this; } private long allocationRequestId_ = -1L; /** * optional int64 allocation_request_id = 8 [default = -1]; * @return Whether the allocationRequestId field is set. */ @java.lang.Override public boolean hasAllocationRequestId() { return ((bitField0_ & 0x00000080) != 0); } /** * optional int64 allocation_request_id = 8 [default = -1]; * @return The allocationRequestId. */ @java.lang.Override public long getAllocationRequestId() { return allocationRequestId_; } /** * optional int64 allocation_request_id = 8 [default = -1]; * @param value The allocationRequestId to set. * @return This builder for chaining. */ public Builder setAllocationRequestId(long value) { allocationRequestId_ = value; bitField0_ |= 0x00000080; onChanged(); return this; } /** * optional int64 allocation_request_id = 8 [default = -1]; * @return This builder for chaining. */ public Builder clearAllocationRequestId() { bitField0_ = (bitField0_ & ~0x00000080); allocationRequestId_ = -1L; onChanged(); return this; } private int version_ ; /** * optional int32 version = 9 [default = 0]; * @return Whether the version field is set. 
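     * <p>With proto2 semantics {@code getVersion()} simply returns the
     * default (0) when this is false, so {@code hasVersion()} is the only way
     * to tell "unset" apart from an explicit 0.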
*/
    @java.lang.Override
    public boolean hasVersion() {
      return ((bitField0_ & 0x00000100) != 0);
    }
    /**
     * optional int32 version = 9 [default = 0];
     * @return The version.
     */
    @java.lang.Override
    public int getVersion() {
      return version_;
    }
    /**
     * optional int32 version = 9 [default = 0];
     * @param value The version to set.
     * @return This builder for chaining.
     */
    public Builder setVersion(int value) {
      version_ = value;
      bitField0_ |= 0x00000100;
      onChanged();
      return this;
    }
    /**
     * optional int32 version = 9 [default = 0];
     * @return This builder for chaining.
     */
    public Builder clearVersion() {
      bitField0_ = (bitField0_ & ~0x00000100);
      version_ = 0;
      onChanged();
      return this;
    }

    private org.apache.hadoop.thirdparty.protobuf.LazyStringList allocationTags_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
    private void ensureAllocationTagsIsMutable() {
      if (!((bitField0_ & 0x00000200) != 0)) {
        allocationTags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(allocationTags_);
        bitField0_ |= 0x00000200;
      }
    }
    /**
     * repeated string allocation_tags = 10;
     * @return A list containing the allocationTags.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getAllocationTagsList() {
      return allocationTags_.getUnmodifiableView();
    }
    /**
     * repeated string allocation_tags = 10;
     * @return The count of allocationTags.
     */
    public int getAllocationTagsCount() {
      return allocationTags_.size();
    }
    /**
     * repeated string allocation_tags = 10;
     * @param index The index of the element to return.
     * @return The allocationTags at the given index.
     */
    public java.lang.String getAllocationTags(int index) {
      return allocationTags_.get(index);
    }
    /**
     * repeated string allocation_tags = 10;
     * @param index The index of the value to return.
     * @return The bytes of the allocationTags at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getAllocationTagsBytes(int index) {
      return allocationTags_.getByteString(index);
    }
    /**
     * repeated string allocation_tags = 10;
     * @param index The index to set the value at.
     * @param value The allocationTags to set.
     * @return This builder for chaining.
     */
    public Builder setAllocationTags(
        int index, java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      ensureAllocationTagsIsMutable();
      allocationTags_.set(index, value);
      onChanged();
      return this;
    }
    /**
     * repeated string allocation_tags = 10;
     * @param value The allocationTags to add.
     * @return This builder for chaining.
     */
    public Builder addAllocationTags(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      ensureAllocationTagsIsMutable();
      allocationTags_.add(value);
      onChanged();
      return this;
    }
    /**
     * repeated string allocation_tags = 10;
     * @param values The allocationTags to add.
     * @return This builder for chaining.
     */
    public Builder addAllAllocationTags(
        java.lang.Iterable<java.lang.String> values) {
      ensureAllocationTagsIsMutable();
      org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
          values, allocationTags_);
      onChanged();
      return this;
    }
    /**
     * repeated string allocation_tags = 10;
     * @return This builder for chaining.
     */
    public Builder clearAllocationTags() {
      allocationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000200);
      onChanged();
      return this;
    }
    /**
     * repeated string allocation_tags = 10;
     * @param value The bytes of the allocationTags to add.
     * @return This builder for chaining.
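     * <p>Illustrative sketch (assumes the tag bytes are valid UTF-8):
     * <pre>
     * builder.addAllocationTagsBytes(
     *     org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8("web"));
     * </pre>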
*/ public Builder addAllocationTagsBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureAllocationTagsIsMutable(); allocationTags_.add(value); onChanged(); return this; } private java.lang.Object exposedPorts_ = ""; /** * optional string exposed_ports = 11; * @return Whether the exposedPorts field is set. */ public boolean hasExposedPorts() { return ((bitField0_ & 0x00000400) != 0); } /** * optional string exposed_ports = 11; * @return The exposedPorts. */ public java.lang.String getExposedPorts() { java.lang.Object ref = exposedPorts_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { exposedPorts_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string exposed_ports = 11; * @return The bytes for exposedPorts. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getExposedPortsBytes() { java.lang.Object ref = exposedPorts_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); exposedPorts_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string exposed_ports = 11; * @param value The exposedPorts to set. * @return This builder for chaining. */ public Builder setExposedPorts( java.lang.String value) { if (value == null) { throw new NullPointerException(); } exposedPorts_ = value; bitField0_ |= 0x00000400; onChanged(); return this; } /** * optional string exposed_ports = 11; * @return This builder for chaining. */ public Builder clearExposedPorts() { exposedPorts_ = getDefaultInstance().getExposedPorts(); bitField0_ = (bitField0_ & ~0x00000400); onChanged(); return this; } /** * optional string exposed_ports = 11; * @param value The bytes for exposedPorts to set. * @return This builder for chaining. 
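     * <p>Unlike {@code setExposedPorts(String)}, this bytes variant stores the
     * {@code ByteString} as-is; UTF-8 validity is only checked lazily when the
     * field is later read back as a string.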
*/
    public Builder setExposedPortsBytes(
        org.apache.hadoop.thirdparty.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      exposedPorts_ = value;
      bitField0_ |= 0x00000400;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerProto)
  }

  // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerProto)
  private static final org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto();
  }

  public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerProto>
      PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerProto>() {
    @java.lang.Override
    public ContainerProto parsePartialFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerProto> parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerProto> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}

public interface ContainerReportProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerReportProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /**
   * optional .hadoop.yarn.ContainerIdProto container_id = 1;
   * @return Whether the containerId field is set.
   */
  boolean hasContainerId();
  /**
   * optional .hadoop.yarn.ContainerIdProto container_id = 1;
   * @return The containerId.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
  /**
   * optional .hadoop.yarn.ContainerIdProto container_id = 1;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();

  /**
   * optional .hadoop.yarn.ResourceProto resource = 2;
   * @return Whether the resource field is set.
   */
  boolean hasResource();
  /**
   * optional .hadoop.yarn.ResourceProto resource = 2;
   * @return The resource.
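   * <p>Message-typed getters on this interface never return null; when the
   * field is unset they return the corresponding default instance.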
*/ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource(); /** * optional .hadoop.yarn.ResourceProto resource = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder(); /** * optional .hadoop.yarn.NodeIdProto node_id = 3; * @return Whether the nodeId field is set. */ boolean hasNodeId(); /** * optional .hadoop.yarn.NodeIdProto node_id = 3; * @return The nodeId. */ org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId(); /** * optional .hadoop.yarn.NodeIdProto node_id = 3; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder(); /** * optional .hadoop.yarn.PriorityProto priority = 4; * @return Whether the priority field is set. */ boolean hasPriority(); /** * optional .hadoop.yarn.PriorityProto priority = 4; * @return The priority. */ org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority(); /** * optional .hadoop.yarn.PriorityProto priority = 4; */ org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder(); /** * optional int64 creation_time = 5; * @return Whether the creationTime field is set. */ boolean hasCreationTime(); /** * optional int64 creation_time = 5; * @return The creationTime. */ long getCreationTime(); /** * optional int64 finish_time = 6; * @return Whether the finishTime field is set. */ boolean hasFinishTime(); /** * optional int64 finish_time = 6; * @return The finishTime. */ long getFinishTime(); /** * optional string diagnostics_info = 7 [default = "N/A"]; * @return Whether the diagnosticsInfo field is set. */ boolean hasDiagnosticsInfo(); /** * optional string diagnostics_info = 7 [default = "N/A"]; * @return The diagnosticsInfo. */ java.lang.String getDiagnosticsInfo(); /** * optional string diagnostics_info = 7 [default = "N/A"]; * @return The bytes for diagnosticsInfo. */ org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsInfoBytes(); /** * optional string log_url = 8; * @return Whether the logUrl field is set. */ boolean hasLogUrl(); /** * optional string log_url = 8; * @return The logUrl. */ java.lang.String getLogUrl(); /** * optional string log_url = 8; * @return The bytes for logUrl. */ org.apache.hadoop.thirdparty.protobuf.ByteString getLogUrlBytes(); /** * optional int32 container_exit_status = 9; * @return Whether the containerExitStatus field is set. */ boolean hasContainerExitStatus(); /** * optional int32 container_exit_status = 9; * @return The containerExitStatus. */ int getContainerExitStatus(); /** * optional .hadoop.yarn.ContainerStateProto container_state = 10; * @return Whether the containerState field is set. */ boolean hasContainerState(); /** * optional .hadoop.yarn.ContainerStateProto container_state = 10; * @return The containerState. */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getContainerState(); /** * optional string node_http_address = 11; * @return Whether the nodeHttpAddress field is set. */ boolean hasNodeHttpAddress(); /** * optional string node_http_address = 11; * @return The nodeHttpAddress. */ java.lang.String getNodeHttpAddress(); /** * optional string node_http_address = 11; * @return The bytes for nodeHttpAddress. */ org.apache.hadoop.thirdparty.protobuf.ByteString getNodeHttpAddressBytes(); /** * optional .hadoop.yarn.ExecutionTypeProto executionType = 12 [default = GUARANTEED]; * @return Whether the executionType field is set. */ boolean hasExecutionType(); /** * optional .hadoop.yarn.ExecutionTypeProto executionType = 12 [default = GUARANTEED]; * @return The executionType. 
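* <p>With {@code [default = GUARANTEED]}, the getter never returns
* {@code null}: an unset field reads back as the declared default. Sketch
* (equivalent to calling the getter directly):
* <pre>{@code
* ExecutionTypeProto type = report.hasExecutionType()
*     ? report.getExecutionType()
*     : ExecutionTypeProto.GUARANTEED;
* }</pre>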
*/ org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType(); /** * optional string exposed_ports = 13; * @return Whether the exposedPorts field is set. */ boolean hasExposedPorts(); /** * optional string exposed_ports = 13; * @return The exposedPorts. */ java.lang.String getExposedPorts(); /** * optional string exposed_ports = 13; * @return The bytes for exposedPorts. */ org.apache.hadoop.thirdparty.protobuf.ByteString getExposedPortsBytes(); } /** * Protobuf type {@code hadoop.yarn.ContainerReportProto} */ public static final class ContainerReportProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerReportProto) ContainerReportProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ContainerReportProto.newBuilder() to construct. private ContainerReportProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ContainerReportProto() { diagnosticsInfo_ = "N/A"; logUrl_ = ""; containerState_ = 1; nodeHttpAddress_ = ""; executionType_ = 1; exposedPorts_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ContainerReportProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerReportProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerReportProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder.class); } private int bitField0_; public static final int CONTAINER_ID_FIELD_NUMBER = 1; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_; /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; * @return Whether the containerId field is set. */ @java.lang.Override public boolean hasContainerId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; * @return The containerId. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } public static final int RESOURCE_FIELD_NUMBER = 2; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_; /** * optional .hadoop.yarn.ResourceProto resource = 2; * @return Whether the resource field is set. 
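* <p>Presence of each optional field is tracked by one bit in
* {@code bitField0_} (here {@code 0x00000002}), which keeps the has-methods
* constant-time with no nullable wrappers. From outside the class the test
* is simply:
* <pre>{@code
* boolean resourceKnown = report.hasResource();
* }</pre>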
*/ @java.lang.Override public boolean hasResource() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.ResourceProto resource = 2; * @return The resource. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() { return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_; } /** * optional .hadoop.yarn.ResourceProto resource = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() { return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_; } public static final int NODE_ID_FIELD_NUMBER = 3; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_; /** * optional .hadoop.yarn.NodeIdProto node_id = 3; * @return Whether the nodeId field is set. */ @java.lang.Override public boolean hasNodeId() { return ((bitField0_ & 0x00000004) != 0); } /** * optional .hadoop.yarn.NodeIdProto node_id = 3; * @return The nodeId. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } /** * optional .hadoop.yarn.NodeIdProto node_id = 3; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } public static final int PRIORITY_FIELD_NUMBER = 4; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_; /** * optional .hadoop.yarn.PriorityProto priority = 4; * @return Whether the priority field is set. */ @java.lang.Override public boolean hasPriority() { return ((bitField0_ & 0x00000008) != 0); } /** * optional .hadoop.yarn.PriorityProto priority = 4; * @return The priority. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() { return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } /** * optional .hadoop.yarn.PriorityProto priority = 4; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() { return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } public static final int CREATION_TIME_FIELD_NUMBER = 5; private long creationTime_ = 0L; /** * optional int64 creation_time = 5; * @return Whether the creationTime field is set. */ @java.lang.Override public boolean hasCreationTime() { return ((bitField0_ & 0x00000010) != 0); } /** * optional int64 creation_time = 5; * @return The creationTime. */ @java.lang.Override public long getCreationTime() { return creationTime_; } public static final int FINISH_TIME_FIELD_NUMBER = 6; private long finishTime_ = 0L; /** * optional int64 finish_time = 6; * @return Whether the finishTime field is set. */ @java.lang.Override public boolean hasFinishTime() { return ((bitField0_ & 0x00000020) != 0); } /** * optional int64 finish_time = 6; * @return The finishTime. 
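* <p>A worked sketch: when both timestamps are present, the container's
* lifetime is their difference (the schema does not fix a unit, though
* epoch milliseconds are the usual convention):
* <pre>{@code
* if (report.hasCreationTime() && report.hasFinishTime()) {
*   long elapsedMs = report.getFinishTime() - report.getCreationTime();
* }
* }</pre>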
*/ @java.lang.Override public long getFinishTime() { return finishTime_; } public static final int DIAGNOSTICS_INFO_FIELD_NUMBER = 7; @SuppressWarnings("serial") private volatile java.lang.Object diagnosticsInfo_ = "N/A"; /** * optional string diagnostics_info = 7 [default = "N/A"]; * @return Whether the diagnosticsInfo field is set. */ @java.lang.Override public boolean hasDiagnosticsInfo() { return ((bitField0_ & 0x00000040) != 0); } /** * optional string diagnostics_info = 7 [default = "N/A"]; * @return The diagnosticsInfo. */ @java.lang.Override public java.lang.String getDiagnosticsInfo() { java.lang.Object ref = diagnosticsInfo_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { diagnosticsInfo_ = s; } return s; } } /** * optional string diagnostics_info = 7 [default = "N/A"]; * @return The bytes for diagnosticsInfo. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsInfoBytes() { java.lang.Object ref = diagnosticsInfo_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); diagnosticsInfo_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int LOG_URL_FIELD_NUMBER = 8; @SuppressWarnings("serial") private volatile java.lang.Object logUrl_ = ""; /** * optional string log_url = 8; * @return Whether the logUrl field is set. */ @java.lang.Override public boolean hasLogUrl() { return ((bitField0_ & 0x00000080) != 0); } /** * optional string log_url = 8; * @return The logUrl. */ @java.lang.Override public java.lang.String getLogUrl() { java.lang.Object ref = logUrl_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { logUrl_ = s; } return s; } } /** * optional string log_url = 8; * @return The bytes for logUrl. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getLogUrlBytes() { java.lang.Object ref = logUrl_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); logUrl_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int CONTAINER_EXIT_STATUS_FIELD_NUMBER = 9; private int containerExitStatus_ = 0; /** * optional int32 container_exit_status = 9; * @return Whether the containerExitStatus field is set. */ @java.lang.Override public boolean hasContainerExitStatus() { return ((bitField0_ & 0x00000100) != 0); } /** * optional int32 container_exit_status = 9; * @return The containerExitStatus. */ @java.lang.Override public int getContainerExitStatus() { return containerExitStatus_; } public static final int CONTAINER_STATE_FIELD_NUMBER = 10; private int containerState_ = 1; /** * optional .hadoop.yarn.ContainerStateProto container_state = 10; * @return Whether the containerState field is set. 
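* <p>The state is stored as a raw {@code int}; the getter below resolves it
* through {@code forNumber} and substitutes {@code C_NEW} when the number
* matches no known constant:
* <pre>{@code
* ContainerStateProto state = report.getContainerState();
* // C_NEW if the field was unset or carried an unrecognized number
* }</pre>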
*/ @java.lang.Override public boolean hasContainerState() { return ((bitField0_ & 0x00000200) != 0); } /** * optional .hadoop.yarn.ContainerStateProto container_state = 10; * @return The containerState. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getContainerState() { org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.forNumber(containerState_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.C_NEW : result; } public static final int NODE_HTTP_ADDRESS_FIELD_NUMBER = 11; @SuppressWarnings("serial") private volatile java.lang.Object nodeHttpAddress_ = ""; /** * optional string node_http_address = 11; * @return Whether the nodeHttpAddress field is set. */ @java.lang.Override public boolean hasNodeHttpAddress() { return ((bitField0_ & 0x00000400) != 0); } /** * optional string node_http_address = 11; * @return The nodeHttpAddress. */ @java.lang.Override public java.lang.String getNodeHttpAddress() { java.lang.Object ref = nodeHttpAddress_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { nodeHttpAddress_ = s; } return s; } } /** * optional string node_http_address = 11; * @return The bytes for nodeHttpAddress. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNodeHttpAddressBytes() { java.lang.Object ref = nodeHttpAddress_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); nodeHttpAddress_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int EXECUTIONTYPE_FIELD_NUMBER = 12; private int executionType_ = 1; /** * optional .hadoop.yarn.ExecutionTypeProto executionType = 12 [default = GUARANTEED]; * @return Whether the executionType field is set. */ @java.lang.Override public boolean hasExecutionType() { return ((bitField0_ & 0x00000800) != 0); } /** * optional .hadoop.yarn.ExecutionTypeProto executionType = 12 [default = GUARANTEED]; * @return The executionType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() { org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result; } public static final int EXPOSED_PORTS_FIELD_NUMBER = 13; @SuppressWarnings("serial") private volatile java.lang.Object exposedPorts_ = ""; /** * optional string exposed_ports = 13; * @return Whether the exposedPorts field is set. */ @java.lang.Override public boolean hasExposedPorts() { return ((bitField0_ & 0x00001000) != 0); } /** * optional string exposed_ports = 13; * @return The exposedPorts. 
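* <p>String fields are held as either {@code String} or {@code ByteString}
* and decoded lazily: the first read converts valid UTF-8 bytes and memoizes
* the result, so repeated reads are cheap:
* <pre>{@code
* String ports = report.getExposedPorts();  // may decode once
* String again = report.getExposedPorts();  // returns the cached String
* }</pre>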
*/ @java.lang.Override public java.lang.String getExposedPorts() { java.lang.Object ref = exposedPorts_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { exposedPorts_ = s; } return s; } } /** * optional string exposed_ports = 13; * @return The bytes for exposedPorts. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getExposedPortsBytes() { java.lang.Object ref = exposedPorts_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); exposedPorts_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasResource()) { if (!getResource().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getContainerId()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getResource()); } if (((bitField0_ & 0x00000004) != 0)) { output.writeMessage(3, getNodeId()); } if (((bitField0_ & 0x00000008) != 0)) { output.writeMessage(4, getPriority()); } if (((bitField0_ & 0x00000010) != 0)) { output.writeInt64(5, creationTime_); } if (((bitField0_ & 0x00000020) != 0)) { output.writeInt64(6, finishTime_); } if (((bitField0_ & 0x00000040) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 7, diagnosticsInfo_); } if (((bitField0_ & 0x00000080) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 8, logUrl_); } if (((bitField0_ & 0x00000100) != 0)) { output.writeInt32(9, containerExitStatus_); } if (((bitField0_ & 0x00000200) != 0)) { output.writeEnum(10, containerState_); } if (((bitField0_ & 0x00000400) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 11, nodeHttpAddress_); } if (((bitField0_ & 0x00000800) != 0)) { output.writeEnum(12, executionType_); } if (((bitField0_ & 0x00001000) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 13, exposedPorts_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getContainerId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getResource()); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(3, getNodeId()); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(4, getPriority()); } if (((bitField0_ & 0x00000010) != 0)) 
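/*
 * Editor's note (illustrative, not generated code): writeTo and
 * getSerializedSize walk the same presence bits in the same field order,
 * so a serialize/parse round trip is just:
 *
 *   byte[] bytes = report.toByteArray();      // sized by getSerializedSize
 *   ContainerReportProto back =
 *       ContainerReportProto.parseFrom(bytes);
 *
 * toByteArray() is inherited from the generated message base class;
 * `report` is an assumed, fully built message.
 */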
{ size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(5, creationTime_); } if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(6, finishTime_); } if (((bitField0_ & 0x00000040) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(7, diagnosticsInfo_); } if (((bitField0_ & 0x00000080) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(8, logUrl_); } if (((bitField0_ & 0x00000100) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(9, containerExitStatus_); } if (((bitField0_ & 0x00000200) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(10, containerState_); } if (((bitField0_ & 0x00000400) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(11, nodeHttpAddress_); } if (((bitField0_ & 0x00000800) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(12, executionType_); } if (((bitField0_ & 0x00001000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(13, exposedPorts_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto) obj; if (hasContainerId() != other.hasContainerId()) return false; if (hasContainerId()) { if (!getContainerId() .equals(other.getContainerId())) return false; } if (hasResource() != other.hasResource()) return false; if (hasResource()) { if (!getResource() .equals(other.getResource())) return false; } if (hasNodeId() != other.hasNodeId()) return false; if (hasNodeId()) { if (!getNodeId() .equals(other.getNodeId())) return false; } if (hasPriority() != other.hasPriority()) return false; if (hasPriority()) { if (!getPriority() .equals(other.getPriority())) return false; } if (hasCreationTime() != other.hasCreationTime()) return false; if (hasCreationTime()) { if (getCreationTime() != other.getCreationTime()) return false; } if (hasFinishTime() != other.hasFinishTime()) return false; if (hasFinishTime()) { if (getFinishTime() != other.getFinishTime()) return false; } if (hasDiagnosticsInfo() != other.hasDiagnosticsInfo()) return false; if (hasDiagnosticsInfo()) { if (!getDiagnosticsInfo() .equals(other.getDiagnosticsInfo())) return false; } if (hasLogUrl() != other.hasLogUrl()) return false; if (hasLogUrl()) { if (!getLogUrl() .equals(other.getLogUrl())) return false; } if (hasContainerExitStatus() != other.hasContainerExitStatus()) return false; if (hasContainerExitStatus()) { if (getContainerExitStatus() != other.getContainerExitStatus()) return false; } if (hasContainerState() != other.hasContainerState()) return false; if (hasContainerState()) { if (containerState_ != other.containerState_) return false; } if (hasNodeHttpAddress() != other.hasNodeHttpAddress()) return false; if (hasNodeHttpAddress()) { if (!getNodeHttpAddress() .equals(other.getNodeHttpAddress())) return false; } if (hasExecutionType() != other.hasExecutionType()) return false; if (hasExecutionType()) { if (executionType_ != 
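/*
 * Editor's note (illustrative): equals() compares presence first
 * (hasX() != other.hasX()) and values second; enum-typed fields such as
 * this one compare on the raw stored number. Two empty messages are
 * therefore equal:
 *
 *   ContainerReportProto a = ContainerReportProto.getDefaultInstance();
 *   ContainerReportProto b = ContainerReportProto.newBuilder().build();
 *   // a.equals(b) is true: no fields set, identical unknown fields
 */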
other.executionType_) return false; } if (hasExposedPorts() != other.hasExposedPorts()) return false; if (hasExposedPorts()) { if (!getExposedPorts() .equals(other.getExposedPorts())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasContainerId()) { hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER; hash = (53 * hash) + getContainerId().hashCode(); } if (hasResource()) { hash = (37 * hash) + RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getResource().hashCode(); } if (hasNodeId()) { hash = (37 * hash) + NODE_ID_FIELD_NUMBER; hash = (53 * hash) + getNodeId().hashCode(); } if (hasPriority()) { hash = (37 * hash) + PRIORITY_FIELD_NUMBER; hash = (53 * hash) + getPriority().hashCode(); } if (hasCreationTime()) { hash = (37 * hash) + CREATION_TIME_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getCreationTime()); } if (hasFinishTime()) { hash = (37 * hash) + FINISH_TIME_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getFinishTime()); } if (hasDiagnosticsInfo()) { hash = (37 * hash) + DIAGNOSTICS_INFO_FIELD_NUMBER; hash = (53 * hash) + getDiagnosticsInfo().hashCode(); } if (hasLogUrl()) { hash = (37 * hash) + LOG_URL_FIELD_NUMBER; hash = (53 * hash) + getLogUrl().hashCode(); } if (hasContainerExitStatus()) { hash = (37 * hash) + CONTAINER_EXIT_STATUS_FIELD_NUMBER; hash = (53 * hash) + getContainerExitStatus(); } if (hasContainerState()) { hash = (37 * hash) + CONTAINER_STATE_FIELD_NUMBER; hash = (53 * hash) + containerState_; } if (hasNodeHttpAddress()) { hash = (37 * hash) + NODE_HTTP_ADDRESS_FIELD_NUMBER; hash = (53 * hash) + getNodeHttpAddress().hashCode(); } if (hasExecutionType()) { hash = (37 * hash) + EXECUTIONTYPE_FIELD_NUMBER; hash = (53 * hash) + executionType_; } if (hasExposedPorts()) { hash = (37 * hash) + EXPOSED_PORTS_FIELD_NUMBER; hash = (53 * hash) + getExposedPorts().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
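/*
 * Editor's note (illustrative): the parseFrom overloads around this point
 * cover the standard protobuf inputs -- ByteBuffer, ByteString, byte[],
 * InputStream (plain and delimited) and CodedInputStream -- each optionally
 * taking an ExtensionRegistryLite. For example, with `path` a hypothetical
 * file holding one serialized message:
 *
 *   try (java.io.FileInputStream in = new java.io.FileInputStream(path)) {
 *     ContainerReportProto report = ContainerReportProto.parseFrom(in);
 *   }
 */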
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
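/*
 * Editor's note (illustrative): toBuilder() is the idiomatic way to derive
 * a modified copy of an immutable message; the default instance
 * short-circuits to a fresh Builder because there is nothing to copy:
 *
 *   ContainerReportProto updated = report.toBuilder()
 *       .setContainerExitStatus(0)
 *       .build();
 */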
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ContainerReportProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerReportProto) org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerReportProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerReportProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getContainerIdFieldBuilder(); getResourceFieldBuilder(); getNodeIdFieldBuilder(); getPriorityFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; containerId_ = null; if (containerIdBuilder_ != null) { containerIdBuilder_.dispose(); containerIdBuilder_ = null; } resource_ = null; if (resourceBuilder_ != null) { resourceBuilder_.dispose(); resourceBuilder_ = null; } nodeId_ = null; if (nodeIdBuilder_ != null) { nodeIdBuilder_.dispose(); nodeIdBuilder_ = null; } priority_ = null; if (priorityBuilder_ != null) { priorityBuilder_.dispose(); priorityBuilder_ = null; } creationTime_ = 0L; finishTime_ = 0L; diagnosticsInfo_ = "N/A"; logUrl_ = ""; containerExitStatus_ = 0; containerState_ = 1; nodeHttpAddress_ = ""; executionType_ = 1; exposedPorts_ = ""; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerReportProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto result = new 
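/*
 * Editor's note (illustrative): build() above delegates here and then
 * enforces isInitialized() (an initialized resource sub-message), whereas
 * buildPartial() skips that check -- which is why the parser uses it to
 * attach a partially read message to an InvalidProtocolBufferException:
 *
 *   ContainerReportProto partial =
 *       ContainerReportProto.newBuilder().buildPartial(); // no validation
 */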
org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.containerId_ = containerIdBuilder_ == null ? containerId_ : containerIdBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.resource_ = resourceBuilder_ == null ? resource_ : resourceBuilder_.build(); to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.nodeId_ = nodeIdBuilder_ == null ? nodeId_ : nodeIdBuilder_.build(); to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.priority_ = priorityBuilder_ == null ? priority_ : priorityBuilder_.build(); to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.creationTime_ = creationTime_; to_bitField0_ |= 0x00000010; } if (((from_bitField0_ & 0x00000020) != 0)) { result.finishTime_ = finishTime_; to_bitField0_ |= 0x00000020; } if (((from_bitField0_ & 0x00000040) != 0)) { result.diagnosticsInfo_ = diagnosticsInfo_; to_bitField0_ |= 0x00000040; } if (((from_bitField0_ & 0x00000080) != 0)) { result.logUrl_ = logUrl_; to_bitField0_ |= 0x00000080; } if (((from_bitField0_ & 0x00000100) != 0)) { result.containerExitStatus_ = containerExitStatus_; to_bitField0_ |= 0x00000100; } if (((from_bitField0_ & 0x00000200) != 0)) { result.containerState_ = containerState_; to_bitField0_ |= 0x00000200; } if (((from_bitField0_ & 0x00000400) != 0)) { result.nodeHttpAddress_ = nodeHttpAddress_; to_bitField0_ |= 0x00000400; } if (((from_bitField0_ & 0x00000800) != 0)) { result.executionType_ = executionType_; to_bitField0_ |= 0x00000800; } if (((from_bitField0_ & 0x00001000) != 0)) { result.exposedPorts_ = exposedPorts_; to_bitField0_ |= 0x00001000; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto other) { if (other == 
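/*
 * Editor's note (illustrative): mergeFrom copies only the fields set on
 * `other` -- scalars and strings overwrite, message fields merge
 * recursively via mergeContainerId, mergeResource, and friends -- so:
 *
 *   Builder b = ContainerReportProto.newBuilder().setFinishTime(1L);
 *   b.mergeFrom(otherReport);  // otherReport's set fields win;
 *                              // its unset fields leave b untouched
 *
 * (`otherReport` is an assumed, previously built message.)
 */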
org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance()) return this; if (other.hasContainerId()) { mergeContainerId(other.getContainerId()); } if (other.hasResource()) { mergeResource(other.getResource()); } if (other.hasNodeId()) { mergeNodeId(other.getNodeId()); } if (other.hasPriority()) { mergePriority(other.getPriority()); } if (other.hasCreationTime()) { setCreationTime(other.getCreationTime()); } if (other.hasFinishTime()) { setFinishTime(other.getFinishTime()); } if (other.hasDiagnosticsInfo()) { diagnosticsInfo_ = other.diagnosticsInfo_; bitField0_ |= 0x00000040; onChanged(); } if (other.hasLogUrl()) { logUrl_ = other.logUrl_; bitField0_ |= 0x00000080; onChanged(); } if (other.hasContainerExitStatus()) { setContainerExitStatus(other.getContainerExitStatus()); } if (other.hasContainerState()) { setContainerState(other.getContainerState()); } if (other.hasNodeHttpAddress()) { nodeHttpAddress_ = other.nodeHttpAddress_; bitField0_ |= 0x00000400; onChanged(); } if (other.hasExecutionType()) { setExecutionType(other.getExecutionType()); } if (other.hasExposedPorts()) { exposedPorts_ = other.exposedPorts_; bitField0_ |= 0x00001000; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasResource()) { if (!getResource().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getContainerIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getResourceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage( getNodeIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 case 34: { input.readMessage( getPriorityFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000008; break; } // case 34 case 40: { creationTime_ = input.readInt64(); bitField0_ |= 0x00000010; break; } // case 40 case 48: { finishTime_ = input.readInt64(); bitField0_ |= 0x00000020; break; } // case 48 case 58: { diagnosticsInfo_ = input.readBytes(); bitField0_ |= 0x00000040; break; } // case 58 case 66: { logUrl_ = input.readBytes(); bitField0_ |= 0x00000080; break; } // case 66 case 72: { containerExitStatus_ = input.readInt32(); bitField0_ |= 0x00000100; break; } // case 72 case 80: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(10, tmpRaw); } else { containerState_ = tmpRaw; bitField0_ |= 0x00000200; } break; } // case 80 case 90: { nodeHttpAddress_ = input.readBytes(); bitField0_ |= 0x00000400; break; } // case 90 case 96: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(12, tmpRaw); } else { executionType_ = 
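/*
 * Editor's note (illustrative): enum fields arrive as raw varints; a
 * number with no matching constant is diverted to
 * mergeUnknownVarintField(...) rather than stored, so values written by a
 * newer schema survive a parse/re-serialize round trip unchanged.
 */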
tmpRaw; bitField0_ |= 0x00000800; } break; } // case 96 case 106: { exposedPorts_ = input.readBytes(); bitField0_ |= 0x00001000; break; } // case 106 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_; /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; * @return Whether the containerId field is set. */ public boolean hasContainerId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; * @return The containerId. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() { if (containerIdBuilder_ == null) { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } else { return containerIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } containerId_ = value; } else { containerIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (containerIdBuilder_ == null) { containerId_ = builderForValue.build(); } else { containerIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder mergeContainerId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && containerId_ != null && containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) { getContainerIdBuilder().mergeFrom(value); } else { containerId_ = value; } } else { containerIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder clearContainerId() { bitField0_ = (bitField0_ & ~0x00000001); containerId_ = null; if (containerIdBuilder_ != null) { containerIdBuilder_.dispose(); containerIdBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getContainerIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public 
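/*
 * Editor's note (illustrative): message-typed fields also expose a nested
 * builder for in-place editing; fetching it marks the field as set:
 *
 *   reportBuilder.getContainerIdBuilder().setId(42L);
 *
 * (`reportBuilder` is an assumed ContainerReportProto.Builder; setId is
 * assumed from ContainerIdProto, which is defined elsewhere in this file.)
 */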
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { if (containerIdBuilder_ != null) { return containerIdBuilder_.getMessageOrBuilder(); } else { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getContainerIdFieldBuilder() { if (containerIdBuilder_ == null) { containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( getContainerId(), getParentForChildren(), isClean()); containerId_ = null; } return containerIdBuilder_; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourceBuilder_; /** * optional .hadoop.yarn.ResourceProto resource = 2; * @return Whether the resource field is set. */ public boolean hasResource() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.ResourceProto resource = 2; * @return The resource. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() { if (resourceBuilder_ == null) { return resource_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_; } else { return resourceBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceProto resource = 2; */ public Builder setResource(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (resourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } resource_ = value; } else { resourceBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resource = 2; */ public Builder setResource( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) { if (resourceBuilder_ == null) { resource_ = builderForValue.build(); } else { resourceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resource = 2; */ public Builder mergeResource(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (resourceBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && resource_ != null && resource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) { getResourceBuilder().mergeFrom(value); } else { resource_ = value; } } else { resourceBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resource = 2; */ public Builder clearResource() { bitField0_ = (bitField0_ & ~0x00000002); resource_ = null; if (resourceBuilder_ != null) { resourceBuilder_.dispose(); resourceBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resource = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourceBuilder() { bitField0_ |= 0x00000002; onChanged(); return getResourceFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceProto resource = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() { if (resourceBuilder_ != null) { return resourceBuilder_.getMessageOrBuilder(); } else { return resource_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_; } } /** * optional .hadoop.yarn.ResourceProto resource = 2; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getResourceFieldBuilder() { if (resourceBuilder_ == null) { resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>( getResource(), getParentForChildren(), isClean()); resource_ = null; } return resourceBuilder_; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_; /** * optional .hadoop.yarn.NodeIdProto node_id = 3; * @return Whether the nodeId field is set. */ public boolean hasNodeId() { return ((bitField0_ & 0x00000004) != 0); } /** * optional .hadoop.yarn.NodeIdProto node_id = 3; * @return The nodeId. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() { if (nodeIdBuilder_ == null) { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } else { return nodeIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.NodeIdProto node_id = 3; */ public Builder setNodeId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) { if (nodeIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } nodeId_ = value; } else { nodeIdBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional .hadoop.yarn.NodeIdProto node_id = 3; */ public Builder setNodeId( org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) { if (nodeIdBuilder_ == null) { nodeId_ = builderForValue.build(); } else { nodeIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional .hadoop.yarn.NodeIdProto node_id = 3; */ public Builder mergeNodeId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) { if (nodeIdBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && nodeId_ != null && nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) { getNodeIdBuilder().mergeFrom(value); } else { nodeId_ = value; } } else { nodeIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional .hadoop.yarn.NodeIdProto node_id = 3; */ public Builder clearNodeId() { bitField0_ = (bitField0_ & ~0x00000004); nodeId_ = null; if (nodeIdBuilder_ != null) { nodeIdBuilder_.dispose(); nodeIdBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.NodeIdProto node_id = 3; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() { bitField0_ |= 0x00000004; onChanged(); return getNodeIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.NodeIdProto node_id = 3; */ 
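/*
 * Editor's note (illustrative): mergeNodeId(...) above folds a value into
 * an existing non-default one instead of replacing it, so partial updates
 * compose:
 *
 *   b.mergeNodeId(NodeIdProto.newBuilder().setHost("n1").build());
 *   b.mergeNodeId(NodeIdProto.newBuilder().setPort(8041).build());
 *   // b now carries both host and port (`b` is an assumed
 *   // ContainerReportProto.Builder; setHost/setPort are assumed from
 *   // NodeIdProto, defined elsewhere in this file)
 */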
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() { if (nodeIdBuilder_ != null) { return nodeIdBuilder_.getMessageOrBuilder(); } else { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } } /** * optional .hadoop.yarn.NodeIdProto node_id = 3; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> getNodeIdFieldBuilder() { if (nodeIdBuilder_ == null) { nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>( getNodeId(), getParentForChildren(), isClean()); nodeId_ = null; } return nodeIdBuilder_; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_; /** * optional .hadoop.yarn.PriorityProto priority = 4; * @return Whether the priority field is set. */ public boolean hasPriority() { return ((bitField0_ & 0x00000008) != 0); } /** * optional .hadoop.yarn.PriorityProto priority = 4; * @return The priority. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() { if (priorityBuilder_ == null) { return priority_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } else { return priorityBuilder_.getMessage(); } } /** * optional .hadoop.yarn.PriorityProto priority = 4; */ public Builder setPriority(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) { if (priorityBuilder_ == null) { if (value == null) { throw new NullPointerException(); } priority_ = value; } else { priorityBuilder_.setMessage(value); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 4; */ public Builder setPriority( org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) { if (priorityBuilder_ == null) { priority_ = builderForValue.build(); } else { priorityBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 4; */ public Builder mergePriority(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) { if (priorityBuilder_ == null) { if (((bitField0_ & 0x00000008) != 0) && priority_ != null && priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) { getPriorityBuilder().mergeFrom(value); } else { priority_ = value; } } else { priorityBuilder_.mergeFrom(value); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 4; */ public Builder clearPriority() { bitField0_ = (bitField0_ & ~0x00000008); priority_ = null; if (priorityBuilder_ != null) { priorityBuilder_.dispose(); priorityBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 4; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() { bitField0_ |= 0x00000008; onChanged(); return getPriorityFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.PriorityProto priority = 4; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() { if (priorityBuilder_ != null) { return priorityBuilder_.getMessageOrBuilder(); } else { return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } } /** * optional .hadoop.yarn.PriorityProto priority = 4; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> getPriorityFieldBuilder() { if (priorityBuilder_ == null) { priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>( getPriority(), getParentForChildren(), isClean()); priority_ = null; } return priorityBuilder_; } private long creationTime_ ; /** * optional int64 creation_time = 5; * @return Whether the creationTime field is set. */ @java.lang.Override public boolean hasCreationTime() { return ((bitField0_ & 0x00000010) != 0); } /** * optional int64 creation_time = 5; * @return The creationTime. */ @java.lang.Override public long getCreationTime() { return creationTime_; } /** * optional int64 creation_time = 5; * @param value The creationTime to set. 
      private long creationTime_ ;
      /**
       * optional int64 creation_time = 5;
       * @return Whether the creationTime field is set.
       */
      @java.lang.Override
      public boolean hasCreationTime() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * optional int64 creation_time = 5;
       * @return The creationTime.
       */
      @java.lang.Override
      public long getCreationTime() {
        return creationTime_;
      }
      /**
       * optional int64 creation_time = 5;
       * @param value The creationTime to set.
       * @return This builder for chaining.
       */
      public Builder setCreationTime(long value) {
        creationTime_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * optional int64 creation_time = 5;
       * @return This builder for chaining.
       */
      public Builder clearCreationTime() {
        bitField0_ = (bitField0_ & ~0x00000010);
        creationTime_ = 0L;
        onChanged();
        return this;
      }

      private long finishTime_ ;
      /**
       * optional int64 finish_time = 6;
       * @return Whether the finishTime field is set.
       */
      @java.lang.Override
      public boolean hasFinishTime() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * optional int64 finish_time = 6;
       * @return The finishTime.
       */
      @java.lang.Override
      public long getFinishTime() {
        return finishTime_;
      }
      /**
       * optional int64 finish_time = 6;
       * @param value The finishTime to set.
       * @return This builder for chaining.
       */
      public Builder setFinishTime(long value) {
        finishTime_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * optional int64 finish_time = 6;
       * @return This builder for chaining.
       */
      public Builder clearFinishTime() {
        bitField0_ = (bitField0_ & ~0x00000020);
        finishTime_ = 0L;
        onChanged();
        return this;
      }

      private java.lang.Object diagnosticsInfo_ = "N/A";
      /**
       * optional string diagnostics_info = 7 [default = "N/A"];
       * @return Whether the diagnosticsInfo field is set.
       */
      public boolean hasDiagnosticsInfo() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * optional string diagnostics_info = 7 [default = "N/A"];
       * @return The diagnosticsInfo.
       */
      public java.lang.String getDiagnosticsInfo() {
        java.lang.Object ref = diagnosticsInfo_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            diagnosticsInfo_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string diagnostics_info = 7 [default = "N/A"];
       * @return The bytes for diagnosticsInfo.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDiagnosticsInfoBytes() {
        java.lang.Object ref = diagnosticsInfo_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          diagnosticsInfo_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string diagnostics_info = 7 [default = "N/A"];
       * @param value The diagnosticsInfo to set.
       * @return This builder for chaining.
       */
      public Builder setDiagnosticsInfo(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        diagnosticsInfo_ = value;
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * optional string diagnostics_info = 7 [default = "N/A"];
       * @return This builder for chaining.
       */
      public Builder clearDiagnosticsInfo() {
        diagnosticsInfo_ = getDefaultInstance().getDiagnosticsInfo();
        bitField0_ = (bitField0_ & ~0x00000040);
        onChanged();
        return this;
      }
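      // NOTE (editor): explanatory sketch, not generated code. String fields such
      // as diagnostics_info keep either a java.lang.String or a UTF-8 ByteString
      // in the same Object slot and convert lazily, caching the converted form:
      //
      //   ContainerReportProto.Builder b =
      //       ContainerReportProto.newBuilder().setDiagnosticsInfo("oom");
      //   ByteString raw = b.getDiagnosticsInfoBytes(); // encodes and caches UTF-8
      //   String s = b.getDiagnosticsInfo();            // decodes back if needed
      //
      // clearDiagnosticsInfo() restores the proto default ("N/A" for field 7).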
      /**
       * optional string diagnostics_info = 7 [default = "N/A"];
       * @param value The bytes for diagnosticsInfo to set.
       * @return This builder for chaining.
       */
      public Builder setDiagnosticsInfoBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        diagnosticsInfo_ = value;
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }

      private java.lang.Object logUrl_ = "";
      /**
       * optional string log_url = 8;
       * @return Whether the logUrl field is set.
       */
      public boolean hasLogUrl() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * optional string log_url = 8;
       * @return The logUrl.
       */
      public java.lang.String getLogUrl() {
        java.lang.Object ref = logUrl_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            logUrl_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string log_url = 8;
       * @return The bytes for logUrl.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getLogUrlBytes() {
        java.lang.Object ref = logUrl_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          logUrl_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string log_url = 8;
       * @param value The logUrl to set.
       * @return This builder for chaining.
       */
      public Builder setLogUrl(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        logUrl_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * optional string log_url = 8;
       * @return This builder for chaining.
       */
      public Builder clearLogUrl() {
        logUrl_ = getDefaultInstance().getLogUrl();
        bitField0_ = (bitField0_ & ~0x00000080);
        onChanged();
        return this;
      }
      /**
       * optional string log_url = 8;
       * @param value The bytes for logUrl to set.
       * @return This builder for chaining.
       */
      public Builder setLogUrlBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        logUrl_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }

      private int containerExitStatus_ ;
      /**
       * optional int32 container_exit_status = 9;
       * @return Whether the containerExitStatus field is set.
       */
      @java.lang.Override
      public boolean hasContainerExitStatus() {
        return ((bitField0_ & 0x00000100) != 0);
      }
      /**
       * optional int32 container_exit_status = 9;
       * @return The containerExitStatus.
       */
      @java.lang.Override
      public int getContainerExitStatus() {
        return containerExitStatus_;
      }
      /**
       * optional int32 container_exit_status = 9;
       * @param value The containerExitStatus to set.
       * @return This builder for chaining.
       */
      public Builder setContainerExitStatus(int value) {
        containerExitStatus_ = value;
        bitField0_ |= 0x00000100;
        onChanged();
        return this;
      }
      /**
       * optional int32 container_exit_status = 9;
       * @return This builder for chaining.
       */
      public Builder clearContainerExitStatus() {
        bitField0_ = (bitField0_ & ~0x00000100);
        containerExitStatus_ = 0;
        onChanged();
        return this;
      }
      private int containerState_ = 1;
      /**
       * optional .hadoop.yarn.ContainerStateProto container_state = 10;
       * @return Whether the containerState field is set.
       */
      @java.lang.Override public boolean hasContainerState() {
        return ((bitField0_ & 0x00000200) != 0);
      }
      /**
       * optional .hadoop.yarn.ContainerStateProto container_state = 10;
       * @return The containerState.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getContainerState() {
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.forNumber(containerState_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.C_NEW : result;
      }
      /**
       * optional .hadoop.yarn.ContainerStateProto container_state = 10;
       * @param value The containerState to set.
       * @return This builder for chaining.
       */
      public Builder setContainerState(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000200;
        containerState_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ContainerStateProto container_state = 10;
       * @return This builder for chaining.
       */
      public Builder clearContainerState() {
        bitField0_ = (bitField0_ & ~0x00000200);
        containerState_ = 1;
        onChanged();
        return this;
      }
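      // NOTE (editor): explanatory sketch, not generated code. Enum fields are
      // stored as raw ints; getContainerState() maps the int back through
      // forNumber() and falls back to C_NEW (the first declared value) whenever
      // the stored number matches no known constant. Assuming the usual
      // ContainerStateProto values (C_NEW, C_RUNNING, C_COMPLETE):
      //
      //   ContainerReportProto.Builder b = ContainerReportProto.newBuilder()
      //       .setContainerState(ContainerStateProto.C_COMPLETE);
      //   ContainerStateProto s = b.getContainerState(); // C_COMPLETE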
      private java.lang.Object nodeHttpAddress_ = "";
      /**
       * optional string node_http_address = 11;
       * @return Whether the nodeHttpAddress field is set.
       */
      public boolean hasNodeHttpAddress() {
        return ((bitField0_ & 0x00000400) != 0);
      }
      /**
       * optional string node_http_address = 11;
       * @return The nodeHttpAddress.
       */
      public java.lang.String getNodeHttpAddress() {
        java.lang.Object ref = nodeHttpAddress_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            nodeHttpAddress_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string node_http_address = 11;
       * @return The bytes for nodeHttpAddress.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getNodeHttpAddressBytes() {
        java.lang.Object ref = nodeHttpAddress_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          nodeHttpAddress_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string node_http_address = 11;
       * @param value The nodeHttpAddress to set.
       * @return This builder for chaining.
       */
      public Builder setNodeHttpAddress(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        nodeHttpAddress_ = value;
        bitField0_ |= 0x00000400;
        onChanged();
        return this;
      }
      /**
       * optional string node_http_address = 11;
       * @return This builder for chaining.
       */
      public Builder clearNodeHttpAddress() {
        nodeHttpAddress_ = getDefaultInstance().getNodeHttpAddress();
        bitField0_ = (bitField0_ & ~0x00000400);
        onChanged();
        return this;
      }
      /**
       * optional string node_http_address = 11;
       * @param value The bytes for nodeHttpAddress to set.
       * @return This builder for chaining.
       */
      public Builder setNodeHttpAddressBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        nodeHttpAddress_ = value;
        bitField0_ |= 0x00000400;
        onChanged();
        return this;
      }

      private int executionType_ = 1;
      /**
       * optional .hadoop.yarn.ExecutionTypeProto executionType = 12 [default = GUARANTEED];
       * @return Whether the executionType field is set.
       */
      @java.lang.Override public boolean hasExecutionType() {
        return ((bitField0_ & 0x00000800) != 0);
      }
      /**
       * optional .hadoop.yarn.ExecutionTypeProto executionType = 12 [default = GUARANTEED];
       * @return The executionType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() {
        org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result;
      }
      /**
       * optional .hadoop.yarn.ExecutionTypeProto executionType = 12 [default = GUARANTEED];
       * @param value The executionType to set.
       * @return This builder for chaining.
       */
      public Builder setExecutionType(org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000800;
        executionType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ExecutionTypeProto executionType = 12 [default = GUARANTEED];
       * @return This builder for chaining.
       */
      public Builder clearExecutionType() {
        bitField0_ = (bitField0_ & ~0x00000800);
        executionType_ = 1;
        onChanged();
        return this;
      }

      private java.lang.Object exposedPorts_ = "";
      /**
       * optional string exposed_ports = 13;
       * @return Whether the exposedPorts field is set.
       */
      public boolean hasExposedPorts() {
        return ((bitField0_ & 0x00001000) != 0);
      }
      /**
       * optional string exposed_ports = 13;
       * @return The exposedPorts.
       */
      public java.lang.String getExposedPorts() {
        java.lang.Object ref = exposedPorts_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            exposedPorts_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string exposed_ports = 13;
       * @return The bytes for exposedPorts.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getExposedPortsBytes() {
        java.lang.Object ref = exposedPorts_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          exposedPorts_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string exposed_ports = 13;
       * @param value The exposedPorts to set.
       * @return This builder for chaining.
       */
      public Builder setExposedPorts(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        exposedPorts_ = value;
        bitField0_ |= 0x00001000;
        onChanged();
        return this;
      }
      /**
       * optional string exposed_ports = 13;
       * @return This builder for chaining.
       */
      public Builder clearExposedPorts() {
        exposedPorts_ = getDefaultInstance().getExposedPorts();
        bitField0_ = (bitField0_ & ~0x00001000);
        onChanged();
        return this;
      }
      /**
       * optional string exposed_ports = 13;
       * @param value The bytes for exposedPorts to set.
       * @return This builder for chaining.
       */
      public Builder setExposedPortsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        exposedPorts_ = value;
        bitField0_ |= 0x00001000;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerReportProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerReportProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerReportProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerReportProto>() {
      @java.lang.Override
      public ContainerReportProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerReportProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerReportProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
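  // NOTE (editor): hypothetical usage sketch for the ContainerReportProto API
  // above; it is not part of the generated file. Only standard generated members
  // (newBuilder, build, toByteArray, parseFrom) and fields documented in the
  // builder javadoc are used.
  //
  //   YarnProtos.ContainerReportProto report =
  //       YarnProtos.ContainerReportProto.newBuilder()
  //           .setCreationTime(System.currentTimeMillis())
  //           .setFinishTime(0L)
  //           .setContainerExitStatus(0)
  //           .setContainerState(YarnProtos.ContainerStateProto.C_NEW)
  //           .build();
  //   byte[] wire = report.toByteArray();
  //   YarnProtos.ContainerReportProto parsed =
  //       YarnProtos.ContainerReportProto.parseFrom(wire);
  //   assert parsed.hasCreationTime() && !parsed.hasDiagnosticsInfo();
  //   assert "N/A".equals(parsed.getDiagnosticsInfo()); // unset field yields default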
  public interface URLProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.URLProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional string scheme = 1;
     * @return Whether the scheme field is set.
     */
    boolean hasScheme();
    /**
     * optional string scheme = 1;
     * @return The scheme.
     */
    java.lang.String getScheme();
    /**
     * optional string scheme = 1;
     * @return The bytes for scheme.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getSchemeBytes();

    /**
     * optional string host = 2;
     * @return Whether the host field is set.
     */
    boolean hasHost();
    /**
     * optional string host = 2;
     * @return The host.
     */
    java.lang.String getHost();
    /**
     * optional string host = 2;
     * @return The bytes for host.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostBytes();

    /**
     * optional int32 port = 3;
     * @return Whether the port field is set.
     */
    boolean hasPort();
    /**
     * optional int32 port = 3;
     * @return The port.
     */
    int getPort();

    /**
     * optional string file = 4;
     * @return Whether the file field is set.
     */
    boolean hasFile();
    /**
     * optional string file = 4;
     * @return The file.
     */
    java.lang.String getFile();
    /**
     * optional string file = 4;
     * @return The bytes for file.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getFileBytes();

    /**
     * optional string userInfo = 5;
     * @return Whether the userInfo field is set.
     */
    boolean hasUserInfo();
    /**
     * optional string userInfo = 5;
     * @return The userInfo.
     */
    java.lang.String getUserInfo();
    /**
     * optional string userInfo = 5;
     * @return The bytes for userInfo.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getUserInfoBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.URLProto}
   */
  public static final class URLProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.URLProto)
      URLProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use URLProto.newBuilder() to construct.
    private URLProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private URLProto() {
      scheme_ = "";
      host_ = "";
      file_ = "";
      userInfo_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new URLProto();
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_URLProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_URLProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.URLProto.class, org.apache.hadoop.yarn.proto.YarnProtos.URLProto.Builder.class);
    }

    private int bitField0_;
    public static final int SCHEME_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object scheme_ = "";
    /**
     * optional string scheme = 1;
     * @return Whether the scheme field is set.
     */
    @java.lang.Override
    public boolean hasScheme() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional string scheme = 1;
     * @return The scheme.
     */
    @java.lang.Override
    public java.lang.String getScheme() {
      java.lang.Object ref = scheme_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          scheme_ = s;
        }
        return s;
      }
    }
    /**
     * optional string scheme = 1;
     * @return The bytes for scheme.
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getSchemeBytes() { java.lang.Object ref = scheme_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); scheme_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int HOST_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object host_ = ""; /** * optional string host = 2; * @return Whether the host field is set. */ @java.lang.Override public boolean hasHost() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string host = 2; * @return The host. */ @java.lang.Override public java.lang.String getHost() { java.lang.Object ref = host_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { host_ = s; } return s; } } /** * optional string host = 2; * @return The bytes for host. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getHostBytes() { java.lang.Object ref = host_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); host_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int PORT_FIELD_NUMBER = 3; private int port_ = 0; /** * optional int32 port = 3; * @return Whether the port field is set. */ @java.lang.Override public boolean hasPort() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int32 port = 3; * @return The port. */ @java.lang.Override public int getPort() { return port_; } public static final int FILE_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object file_ = ""; /** * optional string file = 4; * @return Whether the file field is set. */ @java.lang.Override public boolean hasFile() { return ((bitField0_ & 0x00000008) != 0); } /** * optional string file = 4; * @return The file. */ @java.lang.Override public java.lang.String getFile() { java.lang.Object ref = file_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { file_ = s; } return s; } } /** * optional string file = 4; * @return The bytes for file. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getFileBytes() { java.lang.Object ref = file_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); file_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int USERINFO_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object userInfo_ = ""; /** * optional string userInfo = 5; * @return Whether the userInfo field is set. 
*/ @java.lang.Override public boolean hasUserInfo() { return ((bitField0_ & 0x00000010) != 0); } /** * optional string userInfo = 5; * @return The userInfo. */ @java.lang.Override public java.lang.String getUserInfo() { java.lang.Object ref = userInfo_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { userInfo_ = s; } return s; } } /** * optional string userInfo = 5; * @return The bytes for userInfo. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getUserInfoBytes() { java.lang.Object ref = userInfo_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); userInfo_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, scheme_); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, host_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeInt32(3, port_); } if (((bitField0_ & 0x00000008) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, file_); } if (((bitField0_ & 0x00000010) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, userInfo_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, scheme_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, host_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(3, port_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, file_); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, userInfo_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.URLProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.URLProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto) obj; if (hasScheme() != other.hasScheme()) return false; if (hasScheme()) { if (!getScheme() .equals(other.getScheme())) return false; } if (hasHost() != other.hasHost()) 
return false; if (hasHost()) { if (!getHost() .equals(other.getHost())) return false; } if (hasPort() != other.hasPort()) return false; if (hasPort()) { if (getPort() != other.getPort()) return false; } if (hasFile() != other.hasFile()) return false; if (hasFile()) { if (!getFile() .equals(other.getFile())) return false; } if (hasUserInfo() != other.hasUserInfo()) return false; if (hasUserInfo()) { if (!getUserInfo() .equals(other.getUserInfo())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasScheme()) { hash = (37 * hash) + SCHEME_FIELD_NUMBER; hash = (53 * hash) + getScheme().hashCode(); } if (hasHost()) { hash = (37 * hash) + HOST_FIELD_NUMBER; hash = (53 * hash) + getHost().hashCode(); } if (hasPort()) { hash = (37 * hash) + PORT_FIELD_NUMBER; hash = (53 * hash) + getPort(); } if (hasFile()) { hash = (37 * hash) + FILE_FIELD_NUMBER; hash = (53 * hash) + getFile().hashCode(); } if (hasUserInfo()) { hash = (37 * hash) + USERINFO_FIELD_NUMBER; hash = (53 * hash) + getUserInfo().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.URLProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.URLProto) org.apache.hadoop.yarn.proto.YarnProtos.URLProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_URLProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_URLProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.URLProto.class, org.apache.hadoop.yarn.proto.YarnProtos.URLProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.URLProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; scheme_ = ""; host_ = ""; port_ = 0; file_ = ""; userInfo_ = ""; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_URLProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.URLProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto build() { org.apache.hadoop.yarn.proto.YarnProtos.URLProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.URLProto result = new org.apache.hadoop.yarn.proto.YarnProtos.URLProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.scheme_ = scheme_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.host_ = host_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.port_ = port_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.file_ = file_; to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.userInfo_ = userInfo_; to_bitField0_ |= 0x00000010; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.URLProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.URLProto.getDefaultInstance()) return this; if (other.hasScheme()) { scheme_ = other.scheme_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasHost()) { host_ = other.host_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasPort()) { setPort(other.getPort()); } if (other.hasFile()) { file_ = other.file_; bitField0_ |= 0x00000008; onChanged(); } if (other.hasUserInfo()) { userInfo_ = other.userInfo_; bitField0_ |= 0x00000010; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { scheme_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { host_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { port_ = input.readInt32(); bitField0_ |= 0x00000004; break; } // case 24 case 34: { file_ = input.readBytes(); bitField0_ |= 0x00000008; break; } // case 34 case 42: { userInfo_ = input.readBytes(); bitField0_ |= 0x00000010; break; } // case 42 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object scheme_ = ""; /** * optional string scheme = 1; * @return Whether the scheme field is set. */ public boolean hasScheme() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string scheme = 1; * @return The scheme. 
*/ public java.lang.String getScheme() { java.lang.Object ref = scheme_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { scheme_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string scheme = 1; * @return The bytes for scheme. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getSchemeBytes() { java.lang.Object ref = scheme_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); scheme_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string scheme = 1; * @param value The scheme to set. * @return This builder for chaining. */ public Builder setScheme( java.lang.String value) { if (value == null) { throw new NullPointerException(); } scheme_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional string scheme = 1; * @return This builder for chaining. */ public Builder clearScheme() { scheme_ = getDefaultInstance().getScheme(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * optional string scheme = 1; * @param value The bytes for scheme to set. * @return This builder for chaining. */ public Builder setSchemeBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } scheme_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object host_ = ""; /** * optional string host = 2; * @return Whether the host field is set. */ public boolean hasHost() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string host = 2; * @return The host. */ public java.lang.String getHost() { java.lang.Object ref = host_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { host_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string host = 2; * @return The bytes for host. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getHostBytes() { java.lang.Object ref = host_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); host_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string host = 2; * @param value The host to set. * @return This builder for chaining. */ public Builder setHost( java.lang.String value) { if (value == null) { throw new NullPointerException(); } host_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional string host = 2; * @return This builder for chaining. */ public Builder clearHost() { host_ = getDefaultInstance().getHost(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * optional string host = 2; * @param value The bytes for host to set. * @return This builder for chaining. 
*/ public Builder setHostBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } host_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int port_ ; /** * optional int32 port = 3; * @return Whether the port field is set. */ @java.lang.Override public boolean hasPort() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int32 port = 3; * @return The port. */ @java.lang.Override public int getPort() { return port_; } /** * optional int32 port = 3; * @param value The port to set. * @return This builder for chaining. */ public Builder setPort(int value) { port_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional int32 port = 3; * @return This builder for chaining. */ public Builder clearPort() { bitField0_ = (bitField0_ & ~0x00000004); port_ = 0; onChanged(); return this; } private java.lang.Object file_ = ""; /** * optional string file = 4; * @return Whether the file field is set. */ public boolean hasFile() { return ((bitField0_ & 0x00000008) != 0); } /** * optional string file = 4; * @return The file. */ public java.lang.String getFile() { java.lang.Object ref = file_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { file_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string file = 4; * @return The bytes for file. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getFileBytes() { java.lang.Object ref = file_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); file_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string file = 4; * @param value The file to set. * @return This builder for chaining. */ public Builder setFile( java.lang.String value) { if (value == null) { throw new NullPointerException(); } file_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional string file = 4; * @return This builder for chaining. */ public Builder clearFile() { file_ = getDefaultInstance().getFile(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * optional string file = 4; * @param value The bytes for file to set. * @return This builder for chaining. */ public Builder setFileBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } file_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } private java.lang.Object userInfo_ = ""; /** * optional string userInfo = 5; * @return Whether the userInfo field is set. */ public boolean hasUserInfo() { return ((bitField0_ & 0x00000010) != 0); } /** * optional string userInfo = 5; * @return The userInfo. */ public java.lang.String getUserInfo() { java.lang.Object ref = userInfo_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { userInfo_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string userInfo = 5; * @return The bytes for userInfo. 
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getUserInfoBytes() {
        java.lang.Object ref = userInfo_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          userInfo_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string userInfo = 5;
       * @param value The userInfo to set.
       * @return This builder for chaining.
       */
      public Builder setUserInfo(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        userInfo_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * optional string userInfo = 5;
       * @return This builder for chaining.
       */
      public Builder clearUserInfo() {
        userInfo_ = getDefaultInstance().getUserInfo();
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
      /**
       * optional string userInfo = 5;
       * @param value The bytes for userInfo to set.
       * @return This builder for chaining.
       */
      public Builder setUserInfoBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        userInfo_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.URLProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.URLProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.URLProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.URLProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.URLProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<URLProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<URLProto>() {
      @java.lang.Override
      public URLProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<URLProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<URLProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.URLProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
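  // NOTE (editor): hypothetical round-trip sketch for URLProto, not part of the
  // generated file. All members used (newBuilder, parser(), parseFrom, toBuilder,
  // toByteString) are standard protobuf-generated API shown above; the host and
  // file values are invented for illustration.
  //
  //   YarnProtos.URLProto url = YarnProtos.URLProto.newBuilder()
  //       .setScheme("hdfs")
  //       .setHost("namenode.example.com")
  //       .setPort(8020)
  //       .setFile("/app/resources/job.jar")
  //       .build();
  //   YarnProtos.URLProto reparsed =
  //       YarnProtos.URLProto.parser().parseFrom(url.toByteString());
  //   assert url.equals(reparsed) && url.hashCode() == reparsed.hashCode();
  //
  //   // toBuilder() copies the set fields, so edits do not mutate `url`:
  //   YarnProtos.URLProto moved = url.toBuilder().setPort(9000).build();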
  public interface LocalResourceProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.LocalResourceProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional .hadoop.yarn.URLProto resource = 1;
     * @return Whether the resource field is set.
     */
    boolean hasResource();
    /**
     * optional .hadoop.yarn.URLProto resource = 1;
     * @return The resource.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.URLProto getResource();
    /**
     * optional .hadoop.yarn.URLProto resource = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.URLProtoOrBuilder getResourceOrBuilder();

    /**
     * optional int64 size = 2;
     * @return Whether the size field is set.
     */
    boolean hasSize();
    /**
     * optional int64 size = 2;
     * @return The size.
     */
    long getSize();

    /**
     * optional int64 timestamp = 3;
     * @return Whether the timestamp field is set.
     */
    boolean hasTimestamp();
    /**
     * optional int64 timestamp = 3;
     * @return The timestamp.
     */
    long getTimestamp();

    /**
     * optional .hadoop.yarn.LocalResourceTypeProto type = 4;
     * @return Whether the type field is set.
     */
    boolean hasType();
    /**
     * optional .hadoop.yarn.LocalResourceTypeProto type = 4;
     * @return The type.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto getType();

    /**
     * optional .hadoop.yarn.LocalResourceVisibilityProto visibility = 5;
     * @return Whether the visibility field is set.
     */
    boolean hasVisibility();
    /**
     * optional .hadoop.yarn.LocalResourceVisibilityProto visibility = 5;
     * @return The visibility.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto getVisibility();

    /**
     * optional string pattern = 6;
     * @return Whether the pattern field is set.
     */
    boolean hasPattern();
    /**
     * optional string pattern = 6;
     * @return The pattern.
     */
    java.lang.String getPattern();
    /**
     * optional string pattern = 6;
     * @return The bytes for pattern.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getPatternBytes();

    /**
     * optional bool should_be_uploaded_to_shared_cache = 7;
     * @return Whether the shouldBeUploadedToSharedCache field is set.
     */
    boolean hasShouldBeUploadedToSharedCache();
    /**
     * optional bool should_be_uploaded_to_shared_cache = 7;
     * @return The shouldBeUploadedToSharedCache.
     */
    boolean getShouldBeUploadedToSharedCache();
  }
  /**
   * Protobuf type {@code hadoop.yarn.LocalResourceProto}
   */
  public static final class LocalResourceProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.LocalResourceProto)
      LocalResourceProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use LocalResourceProto.newBuilder() to construct.
private LocalResourceProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private LocalResourceProto() { type_ = 1; visibility_ = 1; pattern_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new LocalResourceProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LocalResourceProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LocalResourceProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.class, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder.class); } private int bitField0_; public static final int RESOURCE_FIELD_NUMBER = 1; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto resource_; /** * optional .hadoop.yarn.URLProto resource = 1; * @return Whether the resource field is set. */ @java.lang.Override public boolean hasResource() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.URLProto resource = 1; * @return The resource. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto getResource() { return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.URLProto.getDefaultInstance() : resource_; } /** * optional .hadoop.yarn.URLProto resource = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProtoOrBuilder getResourceOrBuilder() { return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.URLProto.getDefaultInstance() : resource_; } public static final int SIZE_FIELD_NUMBER = 2; private long size_ = 0L; /** * optional int64 size = 2; * @return Whether the size field is set. */ @java.lang.Override public boolean hasSize() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int64 size = 2; * @return The size. */ @java.lang.Override public long getSize() { return size_; } public static final int TIMESTAMP_FIELD_NUMBER = 3; private long timestamp_ = 0L; /** * optional int64 timestamp = 3; * @return Whether the timestamp field is set. */ @java.lang.Override public boolean hasTimestamp() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int64 timestamp = 3; * @return The timestamp. */ @java.lang.Override public long getTimestamp() { return timestamp_; } public static final int TYPE_FIELD_NUMBER = 4; private int type_ = 1; /** * optional .hadoop.yarn.LocalResourceTypeProto type = 4; * @return Whether the type field is set. */ @java.lang.Override public boolean hasType() { return ((bitField0_ & 0x00000008) != 0); } /** * optional .hadoop.yarn.LocalResourceTypeProto type = 4; * @return The type. 
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto getType() { org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto.forNumber(type_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto.ARCHIVE : result; } public static final int VISIBILITY_FIELD_NUMBER = 5; private int visibility_ = 1; /** * optional .hadoop.yarn.LocalResourceVisibilityProto visibility = 5; * @return Whether the visibility field is set. */ @java.lang.Override public boolean hasVisibility() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.LocalResourceVisibilityProto visibility = 5; * @return The visibility. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto getVisibility() { org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto result = org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto.forNumber(visibility_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto.PUBLIC : result; } public static final int PATTERN_FIELD_NUMBER = 6; @SuppressWarnings("serial") private volatile java.lang.Object pattern_ = ""; /** * optional string pattern = 6; * @return Whether the pattern field is set. */ @java.lang.Override public boolean hasPattern() { return ((bitField0_ & 0x00000020) != 0); } /** * optional string pattern = 6; * @return The pattern. */ @java.lang.Override public java.lang.String getPattern() { java.lang.Object ref = pattern_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { pattern_ = s; } return s; } } /** * optional string pattern = 6; * @return The bytes for pattern. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getPatternBytes() { java.lang.Object ref = pattern_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); pattern_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int SHOULD_BE_UPLOADED_TO_SHARED_CACHE_FIELD_NUMBER = 7; private boolean shouldBeUploadedToSharedCache_ = false; /** * optional bool should_be_uploaded_to_shared_cache = 7; * @return Whether the shouldBeUploadedToSharedCache field is set. */ @java.lang.Override public boolean hasShouldBeUploadedToSharedCache() { return ((bitField0_ & 0x00000040) != 0); } /** * optional bool should_be_uploaded_to_shared_cache = 7; * @return The shouldBeUploadedToSharedCache. 
*/ @java.lang.Override public boolean getShouldBeUploadedToSharedCache() { return shouldBeUploadedToSharedCache_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getResource()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt64(2, size_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeInt64(3, timestamp_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeEnum(4, type_); } if (((bitField0_ & 0x00000010) != 0)) { output.writeEnum(5, visibility_); } if (((bitField0_ & 0x00000020) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 6, pattern_); } if (((bitField0_ & 0x00000040) != 0)) { output.writeBool(7, shouldBeUploadedToSharedCache_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getResource()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(2, size_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(3, timestamp_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(4, type_); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(5, visibility_); } if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(6, pattern_); } if (((bitField0_ & 0x00000040) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(7, shouldBeUploadedToSharedCache_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto) obj; if (hasResource() != other.hasResource()) return false; if (hasResource()) { if (!getResource() .equals(other.getResource())) return false; } if (hasSize() != other.hasSize()) return false; if (hasSize()) { if (getSize() != other.getSize()) return false; } if (hasTimestamp() != other.hasTimestamp()) return false; if (hasTimestamp()) { if (getTimestamp() != other.getTimestamp()) return false; } if (hasType() != other.hasType()) return false; if (hasType()) { if (type_ != other.type_) return false; } if (hasVisibility() != other.hasVisibility()) return false; if (hasVisibility()) { if (visibility_ != other.visibility_) return false; } if (hasPattern() != other.hasPattern()) return false; if (hasPattern()) { if (!getPattern() .equals(other.getPattern())) return false; } if 
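// Editorial note on the surrounding equals(): comparison is presence-aware, so an
// unset optional field and a field explicitly set to its default value are not
// considered equal. Illustrative sketch (hypothetical values, not generated code):
//   LocalResourceProto a = LocalResourceProto.newBuilder().setSize(0L).build();
//   LocalResourceProto b = LocalResourceProto.getDefaultInstance();
//   a.equals(b) is false: a.hasSize() != b.hasSize(), even though both
//   getSize() calls return the default 0L.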
(hasShouldBeUploadedToSharedCache() != other.hasShouldBeUploadedToSharedCache()) return false; if (hasShouldBeUploadedToSharedCache()) { if (getShouldBeUploadedToSharedCache() != other.getShouldBeUploadedToSharedCache()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasResource()) { hash = (37 * hash) + RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getResource().hashCode(); } if (hasSize()) { hash = (37 * hash) + SIZE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getSize()); } if (hasTimestamp()) { hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getTimestamp()); } if (hasType()) { hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + type_; } if (hasVisibility()) { hash = (37 * hash) + VISIBILITY_FIELD_NUMBER; hash = (53 * hash) + visibility_; } if (hasPattern()) { hash = (37 * hash) + PATTERN_FIELD_NUMBER; hash = (53 * hash) + getPattern().hashCode(); } if (hasShouldBeUploadedToSharedCache()) { hash = (37 * hash) + SHOULD_BE_UPLOADED_TO_SHARED_CACHE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getShouldBeUploadedToSharedCache()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom(java.io.InputStream input) throws java.io.IOException { return 
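// Editorial note: the parseFrom overloads around this point accept ByteBuffer,
// ByteString, byte[], InputStream and CodedInputStream. A minimal round-trip
// sketch (hypothetical values, not generated code):
//   LocalResourceProto msg = LocalResourceProto.newBuilder()
//       .setSize(1024L).setTimestamp(1L).build();
//   byte[] wire = msg.toByteArray();   // toByteArray() is inherited from AbstractMessageLite
//   LocalResourceProto copy = LocalResourceProto.parseFrom(wire);
//   copy.equals(msg) is true.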
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
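// Editorial note on the surrounding toBuilder(): it copies this immutable message
// into a fresh Builder (newBuilder(prototype) is the static equivalent), which is
// the usual way to derive a modified copy. Illustrative sketch:
//   LocalResourceProto updated = original.toBuilder()
//       .setVisibility(LocalResourceVisibilityProto.PUBLIC)
//       .build();   // 'original' itself is unchanged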
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.LocalResourceProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.LocalResourceProto) org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LocalResourceProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LocalResourceProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.class, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getResourceFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; resource_ = null; if (resourceBuilder_ != null) { resourceBuilder_.dispose(); resourceBuilder_ = null; } size_ = 0L; timestamp_ = 0L; type_ = 1; visibility_ = 1; pattern_ = ""; shouldBeUploadedToSharedCache_ = false; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LocalResourceProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto build() { org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto result = new org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.resource_ = resourceBuilder_ == null ? 
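// Editorial note on the surrounding buildPartial0(): for each bit set in the
// Builder's bitField0_ it copies the staged value into the result and accumulates
// the same bit into to_bitField0_, which is finally ORed into result.bitField0_ so
// that the message's hasXxx() accessors report exactly the fields set on the
// Builder (0x00000001 is resource, 0x00000002 is size, and so on).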
resource_ : resourceBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.size_ = size_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.timestamp_ = timestamp_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.type_ = type_; to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.visibility_ = visibility_; to_bitField0_ |= 0x00000010; } if (((from_bitField0_ & 0x00000020) != 0)) { result.pattern_ = pattern_; to_bitField0_ |= 0x00000020; } if (((from_bitField0_ & 0x00000040) != 0)) { result.shouldBeUploadedToSharedCache_ = shouldBeUploadedToSharedCache_; to_bitField0_ |= 0x00000040; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance()) return this; if (other.hasResource()) { mergeResource(other.getResource()); } if (other.hasSize()) { setSize(other.getSize()); } if (other.hasTimestamp()) { setTimestamp(other.getTimestamp()); } if (other.hasType()) { setType(other.getType()); } if (other.hasVisibility()) { setVisibility(other.getVisibility()); } if (other.hasPattern()) { pattern_ = other.pattern_; bitField0_ |= 0x00000020; onChanged(); } if (other.hasShouldBeUploadedToSharedCache()) { setShouldBeUploadedToSharedCache(other.getShouldBeUploadedToSharedCache()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getResourceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } 
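// Editorial note on the tag switch below: a protobuf tag is
// (field_number << 3) | wire_type, so case 10 is field 1 (resource,
// length-delimited), case 16 is field 2 (size, varint), case 24 is field 3
// (timestamp), cases 32 and 40 are the two enums, case 50 is field 6 (pattern,
// length-delimited) and case 56 is field 7 (the bool). Unknown enum numbers are
// preserved as unknown varint fields rather than dropped.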
// case 10 case 16: { size_ = input.readInt64(); bitField0_ |= 0x00000002; break; } // case 16 case 24: { timestamp_ = input.readInt64(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(4, tmpRaw); } else { type_ = tmpRaw; bitField0_ |= 0x00000008; } break; } // case 32 case 40: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(5, tmpRaw); } else { visibility_ = tmpRaw; bitField0_ |= 0x00000010; } break; } // case 40 case 50: { pattern_ = input.readBytes(); bitField0_ |= 0x00000020; break; } // case 50 case 56: { shouldBeUploadedToSharedCache_ = input.readBool(); bitField0_ |= 0x00000040; break; } // case 56 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto resource_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.URLProto, org.apache.hadoop.yarn.proto.YarnProtos.URLProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.URLProtoOrBuilder> resourceBuilder_; /** * optional .hadoop.yarn.URLProto resource = 1; * @return Whether the resource field is set. */ public boolean hasResource() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.URLProto resource = 1; * @return The resource. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto getResource() { if (resourceBuilder_ == null) { return resource_ == null ? 
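// Editorial note: for the nested resource message the Builder supports two
// styles, shown below: setResource(URLProto) installs a finished value, while
// getResourceBuilder() hands out a sub-builder (backed by a lazily created
// SingleFieldBuilderV3) for editing the nested message in place. Illustrative
// sketch, assuming URLProto exposes the usual generated builder methods:
//   builder.setResource(URLProto.newBuilder().build());
//   builder.getResourceBuilder().mergeFrom(someUrlProto);   // someUrlProto is hypothetical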
org.apache.hadoop.yarn.proto.YarnProtos.URLProto.getDefaultInstance() : resource_; } else { return resourceBuilder_.getMessage(); } } /** * optional .hadoop.yarn.URLProto resource = 1; */ public Builder setResource(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto value) { if (resourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } resource_ = value; } else { resourceBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.URLProto resource = 1; */ public Builder setResource( org.apache.hadoop.yarn.proto.YarnProtos.URLProto.Builder builderForValue) { if (resourceBuilder_ == null) { resource_ = builderForValue.build(); } else { resourceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.URLProto resource = 1; */ public Builder mergeResource(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto value) { if (resourceBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && resource_ != null && resource_ != org.apache.hadoop.yarn.proto.YarnProtos.URLProto.getDefaultInstance()) { getResourceBuilder().mergeFrom(value); } else { resource_ = value; } } else { resourceBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.URLProto resource = 1; */ public Builder clearResource() { bitField0_ = (bitField0_ & ~0x00000001); resource_ = null; if (resourceBuilder_ != null) { resourceBuilder_.dispose(); resourceBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.URLProto resource = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProto.Builder getResourceBuilder() { bitField0_ |= 0x00000001; onChanged(); return getResourceFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.URLProto resource = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.URLProtoOrBuilder getResourceOrBuilder() { if (resourceBuilder_ != null) { return resourceBuilder_.getMessageOrBuilder(); } else { return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.URLProto.getDefaultInstance() : resource_; } } /** * optional .hadoop.yarn.URLProto resource = 1; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.URLProto, org.apache.hadoop.yarn.proto.YarnProtos.URLProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.URLProtoOrBuilder> getResourceFieldBuilder() { if (resourceBuilder_ == null) { resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.URLProto, org.apache.hadoop.yarn.proto.YarnProtos.URLProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.URLProtoOrBuilder>( getResource(), getParentForChildren(), isClean()); resource_ = null; } return resourceBuilder_; } private long size_ ; /** * optional int64 size = 2; * @return Whether the size field is set. */ @java.lang.Override public boolean hasSize() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int64 size = 2; * @return The size. */ @java.lang.Override public long getSize() { return size_; } /** * optional int64 size = 2; * @param value The size to set. * @return This builder for chaining. */ public Builder setSize(long value) { size_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional int64 size = 2; * @return This builder for chaining. 
*/ public Builder clearSize() { bitField0_ = (bitField0_ & ~0x00000002); size_ = 0L; onChanged(); return this; } private long timestamp_ ; /** * optional int64 timestamp = 3; * @return Whether the timestamp field is set. */ @java.lang.Override public boolean hasTimestamp() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int64 timestamp = 3; * @return The timestamp. */ @java.lang.Override public long getTimestamp() { return timestamp_; } /** * optional int64 timestamp = 3; * @param value The timestamp to set. * @return This builder for chaining. */ public Builder setTimestamp(long value) { timestamp_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional int64 timestamp = 3; * @return This builder for chaining. */ public Builder clearTimestamp() { bitField0_ = (bitField0_ & ~0x00000004); timestamp_ = 0L; onChanged(); return this; } private int type_ = 1; /** * optional .hadoop.yarn.LocalResourceTypeProto type = 4; * @return Whether the type field is set. */ @java.lang.Override public boolean hasType() { return ((bitField0_ & 0x00000008) != 0); } /** * optional .hadoop.yarn.LocalResourceTypeProto type = 4; * @return The type. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto getType() { org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto.forNumber(type_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto.ARCHIVE : result; } /** * optional .hadoop.yarn.LocalResourceTypeProto type = 4; * @param value The type to set. * @return This builder for chaining. */ public Builder setType(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; type_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.LocalResourceTypeProto type = 4; * @return This builder for chaining. */ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000008); type_ = 1; onChanged(); return this; } private int visibility_ = 1; /** * optional .hadoop.yarn.LocalResourceVisibilityProto visibility = 5; * @return Whether the visibility field is set. */ @java.lang.Override public boolean hasVisibility() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.LocalResourceVisibilityProto visibility = 5; * @return The visibility. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto getVisibility() { org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto result = org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto.forNumber(visibility_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto.PUBLIC : result; } /** * optional .hadoop.yarn.LocalResourceVisibilityProto visibility = 5; * @param value The visibility to set. * @return This builder for chaining. */ public Builder setVisibility(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000010; visibility_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.LocalResourceVisibilityProto visibility = 5; * @return This builder for chaining. 
*/ public Builder clearVisibility() { bitField0_ = (bitField0_ & ~0x00000010); visibility_ = 1; onChanged(); return this; } private java.lang.Object pattern_ = ""; /** * optional string pattern = 6; * @return Whether the pattern field is set. */ public boolean hasPattern() { return ((bitField0_ & 0x00000020) != 0); } /** * optional string pattern = 6; * @return The pattern. */ public java.lang.String getPattern() { java.lang.Object ref = pattern_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { pattern_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string pattern = 6; * @return The bytes for pattern. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getPatternBytes() { java.lang.Object ref = pattern_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); pattern_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string pattern = 6; * @param value The pattern to set. * @return This builder for chaining. */ public Builder setPattern( java.lang.String value) { if (value == null) { throw new NullPointerException(); } pattern_ = value; bitField0_ |= 0x00000020; onChanged(); return this; } /** * optional string pattern = 6; * @return This builder for chaining. */ public Builder clearPattern() { pattern_ = getDefaultInstance().getPattern(); bitField0_ = (bitField0_ & ~0x00000020); onChanged(); return this; } /** * optional string pattern = 6; * @param value The bytes for pattern to set. * @return This builder for chaining. */ public Builder setPatternBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } pattern_ = value; bitField0_ |= 0x00000020; onChanged(); return this; } private boolean shouldBeUploadedToSharedCache_ ; /** * optional bool should_be_uploaded_to_shared_cache = 7; * @return Whether the shouldBeUploadedToSharedCache field is set. */ @java.lang.Override public boolean hasShouldBeUploadedToSharedCache() { return ((bitField0_ & 0x00000040) != 0); } /** * optional bool should_be_uploaded_to_shared_cache = 7; * @return The shouldBeUploadedToSharedCache. */ @java.lang.Override public boolean getShouldBeUploadedToSharedCache() { return shouldBeUploadedToSharedCache_; } /** * optional bool should_be_uploaded_to_shared_cache = 7; * @param value The shouldBeUploadedToSharedCache to set. * @return This builder for chaining. */ public Builder setShouldBeUploadedToSharedCache(boolean value) { shouldBeUploadedToSharedCache_ = value; bitField0_ |= 0x00000040; onChanged(); return this; } /** * optional bool should_be_uploaded_to_shared_cache = 7; * @return This builder for chaining. 
*/ public Builder clearShouldBeUploadedToSharedCache() { bitField0_ = (bitField0_ & ~0x00000040); shouldBeUploadedToSharedCache_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.LocalResourceProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.LocalResourceProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public LocalResourceProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface StringLongMapProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.StringLongMapProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * required string key = 1; * @return Whether the key field is set. */ boolean hasKey(); /** * required string key = 1; * @return The key. */ java.lang.String getKey(); /** * required string key = 1; * @return The bytes for key. */ org.apache.hadoop.thirdparty.protobuf.ByteString getKeyBytes(); /** * required int64 value = 2; * @return Whether the value field is set. */ boolean hasValue(); /** * required int64 value = 2; * @return The value. 
*/ long getValue(); } /** * Protobuf type {@code hadoop.yarn.StringLongMapProto} */ public static final class StringLongMapProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.StringLongMapProto) StringLongMapProtoOrBuilder { private static final long serialVersionUID = 0L; // Use StringLongMapProto.newBuilder() to construct. private StringLongMapProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private StringLongMapProto() { key_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new StringLongMapProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLongMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLongMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder.class); } private int bitField0_; public static final int KEY_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object key_ = ""; /** * required string key = 1; * @return Whether the key field is set. */ @java.lang.Override public boolean hasKey() { return ((bitField0_ & 0x00000001) != 0); } /** * required string key = 1; * @return The key. */ @java.lang.Override public java.lang.String getKey() { java.lang.Object ref = key_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { key_ = s; } return s; } } /** * required string key = 1; * @return The bytes for key. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getKeyBytes() { java.lang.Object ref = key_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); key_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int VALUE_FIELD_NUMBER = 2; private long value_ = 0L; /** * required int64 value = 2; * @return Whether the value field is set. */ @java.lang.Override public boolean hasValue() { return ((bitField0_ & 0x00000002) != 0); } /** * required int64 value = 2; * @return The value. 
*/ @java.lang.Override public long getValue() { return value_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasKey()) { memoizedIsInitialized = 0; return false; } if (!hasValue()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, key_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt64(2, value_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, key_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(2, value_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto) obj; if (hasKey() != other.hasKey()) return false; if (hasKey()) { if (!getKey() .equals(other.getKey())) return false; } if (hasValue() != other.hasValue()) return false; if (hasValue()) { if (getValue() != other.getValue()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasKey()) { hash = (37 * hash) + KEY_FIELD_NUMBER; hash = (53 * hash) + getKey().hashCode(); } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getValue()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, 
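// Editorial note: unlike LocalResourceProto's all-optional fields, key and value
// here are required, so isInitialized() above reports false until both are set and
// build() will throw via newUninitializedMessageException(). Illustrative sketch
// (the key string is hypothetical):
//   StringLongMapProto entry = StringLongMapProto.newBuilder()
//       .setKey("memory-mb")
//       .setValue(4096L)
//       .build();   // throws if either required field were left unset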
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
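// Editorial note: parseDelimitedFrom above pairs with writeDelimitedTo(OutputStream)
// (inherited from MessageLite) to stream several length-prefixed messages over one
// stream; parseDelimitedFrom returns null at a clean end of stream. Sketch:
//   entry.writeDelimitedTo(out);
//   StringLongMapProto read = StringLongMapProto.parseDelimitedFrom(in);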
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.StringLongMapProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.StringLongMapProto) org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLongMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLongMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; key_ = ""; value_ = 0L; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLongMapProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto build() { org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.key_ = key_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.value_ = value_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } 
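// Editorial note: clear() above resets bitField0_, key_ and value_ to their
// defaults, so one Builder instance can be reused to produce many entries instead
// of allocating a fresh Builder per message.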
@java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.getDefaultInstance()) return this; if (other.hasKey()) { key_ = other.key_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasValue()) { setValue(other.getValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasKey()) { return false; } if (!hasValue()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { key_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { value_ = input.readInt64(); bitField0_ |= 0x00000002; break; } // case 16 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object key_ = ""; /** * required string key = 1; * @return Whether the key field is set. */ public boolean hasKey() { return ((bitField0_ & 0x00000001) != 0); } /** * required string key = 1; * @return The key. */ public java.lang.String getKey() { java.lang.Object ref = key_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { key_ = s; } return s; } else { return (java.lang.String) ref; } } /** * required string key = 1; * @return The bytes for key. 
*/ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getKeyBytes() { java.lang.Object ref = key_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); key_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * required string key = 1; * @param value The key to set. * @return This builder for chaining. */ public Builder setKey( java.lang.String value) { if (value == null) { throw new NullPointerException(); } key_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * required string key = 1; * @return This builder for chaining. */ public Builder clearKey() { key_ = getDefaultInstance().getKey(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * required string key = 1; * @param value The bytes for key to set. * @return This builder for chaining. */ public Builder setKeyBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } key_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private long value_ ; /** * required int64 value = 2; * @return Whether the value field is set. */ @java.lang.Override public boolean hasValue() { return ((bitField0_ & 0x00000002) != 0); } /** * required int64 value = 2; * @return The value. */ @java.lang.Override public long getValue() { return value_; } /** * required int64 value = 2; * @param value The value to set. * @return This builder for chaining. */ public Builder setValue(long value) { value_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * required int64 value = 2; * @return This builder for chaining. 
*/ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000002); value_ = 0L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.StringLongMapProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.StringLongMapProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public StringLongMapProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface StringFloatMapProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.StringFloatMapProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * required string key = 1; * @return Whether the key field is set. */ boolean hasKey(); /** * required string key = 1; * @return The key. */ java.lang.String getKey(); /** * required string key = 1; * @return The bytes for key. */ org.apache.hadoop.thirdparty.protobuf.ByteString getKeyBytes(); /** * required float value = 2; * @return Whether the value field is set. */ boolean hasValue(); /** * required float value = 2; * @return The value. 
*/ float getValue(); } /** * Protobuf type {@code hadoop.yarn.StringFloatMapProto} */ public static final class StringFloatMapProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.StringFloatMapProto) StringFloatMapProtoOrBuilder { private static final long serialVersionUID = 0L; // Use StringFloatMapProto.newBuilder() to construct. private StringFloatMapProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private StringFloatMapProto() { key_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new StringFloatMapProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringFloatMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringFloatMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder.class); } private int bitField0_; public static final int KEY_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object key_ = ""; /** * required string key = 1; * @return Whether the key field is set. */ @java.lang.Override public boolean hasKey() { return ((bitField0_ & 0x00000001) != 0); } /** * required string key = 1; * @return The key. */ @java.lang.Override public java.lang.String getKey() { java.lang.Object ref = key_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { key_ = s; } return s; } } /** * required string key = 1; * @return The bytes for key. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getKeyBytes() { java.lang.Object ref = key_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); key_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int VALUE_FIELD_NUMBER = 2; private float value_ = 0F; /** * required float value = 2; * @return Whether the value field is set. */ @java.lang.Override public boolean hasValue() { return ((bitField0_ & 0x00000002) != 0); } /** * required float value = 2; * @return The value. 
*/ @java.lang.Override public float getValue() { return value_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasKey()) { memoizedIsInitialized = 0; return false; } if (!hasValue()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, key_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeFloat(2, value_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, key_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeFloatSize(2, value_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto) obj; if (hasKey() != other.hasKey()) return false; if (hasKey()) { if (!getKey() .equals(other.getKey())) return false; } if (hasValue() != other.hasValue()) return false; if (hasValue()) { if (java.lang.Float.floatToIntBits(getValue()) != java.lang.Float.floatToIntBits( other.getValue())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasKey()) { hash = (37 * hash) + KEY_FIELD_NUMBER; hash = (53 * hash) + getKey().hashCode(); } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits( getValue()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom( 
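// Editorial note: equals() and hashCode() above compare the float value through
// Float.floatToIntBits, matching java.lang.Float semantics: NaN compares equal to
// NaN, while 0.0f and -0.0f have different bit patterns and compare unequal.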
org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.StringFloatMapProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.StringFloatMapProto) org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringFloatMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringFloatMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; key_ = ""; value_ = 0F; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringFloatMapProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto build() { org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.key_ = key_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.value_ = value_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return 
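    // Editor's note (hand-written, not protoc output): build() above rejects a
    // partially initialized message, since both proto2 fields are `required`,
    // by consulting isInitialized() and throwing; buildPartial() performs no
    // such check, which is why the PARSER defined further down uses it to
    // attach a partially read message to parse errors.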
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto) {
        return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto other) {
      if (other == org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.getDefaultInstance()) return this;
      if (other.hasKey()) {
        key_ = other.key_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasValue()) {
        setValue(other.getValue());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      if (!hasKey()) {
        return false;
      }
      if (!hasValue()) {
        return false;
      }
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              key_ = input.readBytes();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
            case 21: {
              value_ = input.readFloat();
              bitField0_ |= 0x00000002;
              break;
            } // case 21
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;

    private java.lang.Object key_ = "";
    /**
     * required string key = 1;
     * @return Whether the key field is set.
     */
    public boolean hasKey() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * required string key = 1;
     * @return The key.
     */
    public java.lang.String getKey() {
      java.lang.Object ref = key_;
      if (!(ref instanceof java.lang.String)) {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs =
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          key_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * required string key = 1;
     * @return The bytes for key.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getKeyBytes() {
      java.lang.Object ref = key_;
      if (ref instanceof String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b =
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        key_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
    /**
     * required string key = 1;
     * @param value The key to set.
     * @return This builder for chaining.
     */
    public Builder setKey(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      key_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * required string key = 1;
     * @return This builder for chaining.
     */
    public Builder clearKey() {
      key_ = getDefaultInstance().getKey();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     * required string key = 1;
     * @param value The bytes for key to set.
     * @return This builder for chaining.
     */
    public Builder setKeyBytes(
        org.apache.hadoop.thirdparty.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      key_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private float value_ ;
    /**
     * required float value = 2;
     * @return Whether the value field is set.
     */
    @java.lang.Override
    public boolean hasValue() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * required float value = 2;
     * @return The value.
     */
    @java.lang.Override
    public float getValue() {
      return value_;
    }
    /**
     * required float value = 2;
     * @param value The value to set.
     * @return This builder for chaining.
     */
    public Builder setValue(float value) {

      value_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * required float value = 2;
     * @return This builder for chaining.
     */
    public Builder clearValue() {
      bitField0_ = (bitField0_ & ~0x00000002);
      value_ = 0F;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:hadoop.yarn.StringFloatMapProto)
  }

  // @@protoc_insertion_point(class_scope:hadoop.yarn.StringFloatMapProto)
  private static final org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto();
  }

  public static org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<StringFloatMapProto>
      PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<StringFloatMapProto>() {
    @java.lang.Override
    public StringFloatMapProto parsePartialFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static org.apache.hadoop.thirdparty.protobuf.Parser<StringFloatMapProto> parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.Parser<StringFloatMapProto> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
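// Illustrative usage sketch (hand-written, not protoc output): a minimal
// round trip through the generated StringFloatMapProto API above. The entry
// name "vcores.weight" is an arbitrary example value, not a YARN constant.
private static StringFloatMapProto exampleStringFloatRoundTrip()
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  StringFloatMapProto entry = StringFloatMapProto.newBuilder()
      .setKey("vcores.weight")  // required string key = 1
      .setValue(0.5F)           // required float value = 2
      .build();                 // throws if either required field is unset
  byte[] wire = entry.toByteArray();           // serialize to wire format
  return StringFloatMapProto.parseFrom(wire);  // re-parse; equals(entry) holds
}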
public interface ApplicationResourceUsageReportProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationResourceUsageReportProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /**
   * optional int32 num_used_containers = 1;
   * @return Whether the numUsedContainers field is set.
   */
  boolean hasNumUsedContainers();
  /**
   * optional int32 num_used_containers = 1;
   * @return The numUsedContainers.
   */
  int getNumUsedContainers();

  /**
   * optional int32 num_reserved_containers = 2;
   * @return Whether the numReservedContainers field is set.
   */
  boolean hasNumReservedContainers();
  /**
   * optional int32 num_reserved_containers = 2;
   * @return The numReservedContainers.
   */
  int getNumReservedContainers();

  /**
   * optional .hadoop.yarn.ResourceProto used_resources = 3;
   * @return Whether the usedResources field is set.
   */
  boolean hasUsedResources();
  /**
   * optional .hadoop.yarn.ResourceProto used_resources = 3;
   * @return The usedResources.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getUsedResources();
  /**
   * optional .hadoop.yarn.ResourceProto used_resources = 3;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getUsedResourcesOrBuilder();

  /**
   * optional .hadoop.yarn.ResourceProto reserved_resources = 4;
   * @return Whether the reservedResources field is set.
   */
  boolean hasReservedResources();
  /**
   * optional .hadoop.yarn.ResourceProto reserved_resources = 4;
   * @return The reservedResources.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getReservedResources();
  /**
   * optional .hadoop.yarn.ResourceProto reserved_resources = 4;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getReservedResourcesOrBuilder();

  /**
   * optional .hadoop.yarn.ResourceProto needed_resources = 5;
   * @return Whether the neededResources field is set.
   */
  boolean hasNeededResources();
  /**
   * optional .hadoop.yarn.ResourceProto needed_resources = 5;
   * @return The neededResources.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getNeededResources();
  /**
   * optional .hadoop.yarn.ResourceProto needed_resources = 5;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getNeededResourcesOrBuilder();

  /**
   * optional int64 memory_seconds = 6;
   * @return Whether the memorySeconds field is set.
   */
  boolean hasMemorySeconds();
  /**
   * optional int64 memory_seconds = 6;
   * @return The memorySeconds.
   */
  long getMemorySeconds();

  /**
   * optional int64 vcore_seconds = 7;
   * @return Whether the vcoreSeconds field is set.
   */
  boolean hasVcoreSeconds();
  /**
   * optional int64 vcore_seconds = 7;
   * @return The vcoreSeconds.
   */
  long getVcoreSeconds();

  /**
   * optional float queue_usage_percentage = 8;
   * @return Whether the queueUsagePercentage field is set.
   */
  boolean hasQueueUsagePercentage();
  /**
   * optional float queue_usage_percentage = 8;
   * @return The queueUsagePercentage.
   */
  float getQueueUsagePercentage();

  /**
   * optional float cluster_usage_percentage = 9;
   * @return Whether the clusterUsagePercentage field is set.
   */
  boolean hasClusterUsagePercentage();
  /**
   * optional float cluster_usage_percentage = 9;
   * @return The clusterUsagePercentage.
   */
  float getClusterUsagePercentage();

  /**
   * optional int64 preempted_memory_seconds = 10;
   * @return Whether the preemptedMemorySeconds field is set.
   */
  boolean hasPreemptedMemorySeconds();
  /**
   * optional int64 preempted_memory_seconds = 10;
   * @return The preemptedMemorySeconds.
   */
  long getPreemptedMemorySeconds();

  /**
   * optional int64 preempted_vcore_seconds = 11;
   * @return Whether the preemptedVcoreSeconds field is set.
   */
  boolean hasPreemptedVcoreSeconds();
  /**
   * optional int64 preempted_vcore_seconds = 11;
   * @return The preemptedVcoreSeconds.
   */
  long getPreemptedVcoreSeconds();

  /**
   * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;
   */
  java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> getApplicationResourceUsageMapList();
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getApplicationResourceUsageMap(int index);
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;
   */
  int getApplicationResourceUsageMapCount();
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;
   */
  java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder> getApplicationResourceUsageMapOrBuilderList();
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder getApplicationResourceUsageMapOrBuilder(
      int index);

  /**
   * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;
   */
  java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> getApplicationPreemptedResourceUsageMapList();
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getApplicationPreemptedResourceUsageMap(int index);
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;
   */
  int getApplicationPreemptedResourceUsageMapCount();
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;
   */
  java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder> getApplicationPreemptedResourceUsageMapOrBuilderList();
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder getApplicationPreemptedResourceUsageMapOrBuilder(
      int index);
}
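// Illustrative usage sketch (hand-written, not protoc output): populating the
// report message declared below. Only fields visible in this file are used;
// the StringLongMapProto key/value setters and the add-with-builder overload
// are assumed by analogy with the StringFloatMapProto builder above, and all
// numbers are made up for illustration.
private static ApplicationResourceUsageReportProto exampleUsageReport() {
  return ApplicationResourceUsageReportProto.newBuilder()
      .setNumUsedContainers(3)         // optional int32 num_used_containers = 1
      .setNumReservedContainers(1)     // optional int32 num_reserved_containers = 2
      .setMemorySeconds(12288L)        // optional int64 memory_seconds = 6
      .setVcoreSeconds(6L)             // optional int64 vcore_seconds = 7
      .setQueueUsagePercentage(12.5F)  // optional float queue_usage_percentage = 8
      .addApplicationResourceUsageMap( // repeated StringLongMapProto, field 12
          StringLongMapProto.newBuilder()
              .setKey("memory-mb")     // assumed setter, by analogy
              .setValue(4096L))        // assumed setter, by analogy
      .build();                        // all fields are optional; never throws here
}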
/**
 * Protobuf type {@code hadoop.yarn.ApplicationResourceUsageReportProto}
 */
public static final class ApplicationResourceUsageReportProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationResourceUsageReportProto)
    ApplicationResourceUsageReportProtoOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ApplicationResourceUsageReportProto.newBuilder() to construct.
  private ApplicationResourceUsageReportProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ApplicationResourceUsageReportProto() {
    applicationResourceUsageMap_ = java.util.Collections.emptyList();
    applicationPreemptedResourceUsageMap_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new ApplicationResourceUsageReportProto();
  }

  @java.lang.Override
  public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.Builder.class);
  }

  private int bitField0_;
  public static final int NUM_USED_CONTAINERS_FIELD_NUMBER = 1;
  private int numUsedContainers_ = 0;
  /**
   * optional int32 num_used_containers = 1;
   * @return Whether the numUsedContainers field is set.
   */
  @java.lang.Override
  public boolean hasNumUsedContainers() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * optional int32 num_used_containers = 1;
   * @return The numUsedContainers.
   */
  @java.lang.Override
  public int getNumUsedContainers() {
    return numUsedContainers_;
  }

  public static final int NUM_RESERVED_CONTAINERS_FIELD_NUMBER = 2;
  private int numReservedContainers_ = 0;
  /**
   * optional int32 num_reserved_containers = 2;
   * @return Whether the numReservedContainers field is set.
   */
  @java.lang.Override
  public boolean hasNumReservedContainers() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   * optional int32 num_reserved_containers = 2;
   * @return The numReservedContainers.
   */
  @java.lang.Override
  public int getNumReservedContainers() {
    return numReservedContainers_;
  }

  public static final int USED_RESOURCES_FIELD_NUMBER = 3;
  private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto usedResources_;
  /**
   * optional .hadoop.yarn.ResourceProto used_resources = 3;
   * @return Whether the usedResources field is set.
   */
  @java.lang.Override
  public boolean hasUsedResources() {
    return ((bitField0_ & 0x00000004) != 0);
  }
  /**
   * optional .hadoop.yarn.ResourceProto used_resources = 3;
   * @return The usedResources.
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getUsedResources() {
    return usedResources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : usedResources_;
  }
  /**
   * optional .hadoop.yarn.ResourceProto used_resources = 3;
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getUsedResourcesOrBuilder() {
    return usedResources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : usedResources_;
  }

  public static final int RESERVED_RESOURCES_FIELD_NUMBER = 4;
  private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto reservedResources_;
  /**
   * optional .hadoop.yarn.ResourceProto reserved_resources = 4;
   * @return Whether the reservedResources field is set.
   */
  @java.lang.Override
  public boolean hasReservedResources() {
    return ((bitField0_ & 0x00000008) != 0);
  }
  /**
   * optional .hadoop.yarn.ResourceProto reserved_resources = 4;
   * @return The reservedResources.
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getReservedResources() {
    return reservedResources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : reservedResources_;
  }
  /**
   * optional .hadoop.yarn.ResourceProto reserved_resources = 4;
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getReservedResourcesOrBuilder() {
    return reservedResources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : reservedResources_;
  }

  public static final int NEEDED_RESOURCES_FIELD_NUMBER = 5;
  private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto neededResources_;
  /**
   * optional .hadoop.yarn.ResourceProto needed_resources = 5;
   * @return Whether the neededResources field is set.
   */
  @java.lang.Override
  public boolean hasNeededResources() {
    return ((bitField0_ & 0x00000010) != 0);
  }
  /**
   * optional .hadoop.yarn.ResourceProto needed_resources = 5;
   * @return The neededResources.
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getNeededResources() {
    return neededResources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : neededResources_;
  }
  /**
   * optional .hadoop.yarn.ResourceProto needed_resources = 5;
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getNeededResourcesOrBuilder() {
    return neededResources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : neededResources_;
  }

  public static final int MEMORY_SECONDS_FIELD_NUMBER = 6;
  private long memorySeconds_ = 0L;
  /**
   * optional int64 memory_seconds = 6;
   * @return Whether the memorySeconds field is set.
   */
  @java.lang.Override
  public boolean hasMemorySeconds() {
    return ((bitField0_ & 0x00000020) != 0);
  }
  /**
   * optional int64 memory_seconds = 6;
   * @return The memorySeconds.
   */
  @java.lang.Override
  public long getMemorySeconds() {
    return memorySeconds_;
  }

  public static final int VCORE_SECONDS_FIELD_NUMBER = 7;
  private long vcoreSeconds_ = 0L;
  /**
   * optional int64 vcore_seconds = 7;
   * @return Whether the vcoreSeconds field is set.
   */
  @java.lang.Override
  public boolean hasVcoreSeconds() {
    return ((bitField0_ & 0x00000040) != 0);
  }
  /**
   * optional int64 vcore_seconds = 7;
   * @return The vcoreSeconds.
   */
  @java.lang.Override
  public long getVcoreSeconds() {
    return vcoreSeconds_;
  }

  public static final int QUEUE_USAGE_PERCENTAGE_FIELD_NUMBER = 8;
  private float queueUsagePercentage_ = 0F;
  /**
   * optional float queue_usage_percentage = 8;
   * @return Whether the queueUsagePercentage field is set.
   */
  @java.lang.Override
  public boolean hasQueueUsagePercentage() {
    return ((bitField0_ & 0x00000080) != 0);
  }
  /**
   * optional float queue_usage_percentage = 8;
   * @return The queueUsagePercentage.
   */
  @java.lang.Override
  public float getQueueUsagePercentage() {
    return queueUsagePercentage_;
  }

  public static final int CLUSTER_USAGE_PERCENTAGE_FIELD_NUMBER = 9;
  private float clusterUsagePercentage_ = 0F;
  /**
   * optional float cluster_usage_percentage = 9;
   * @return Whether the clusterUsagePercentage field is set.
   */
  @java.lang.Override
  public boolean hasClusterUsagePercentage() {
    return ((bitField0_ & 0x00000100) != 0);
  }
  /**
   * optional float cluster_usage_percentage = 9;
   * @return The clusterUsagePercentage.
   */
  @java.lang.Override
  public float getClusterUsagePercentage() {
    return clusterUsagePercentage_;
  }

  public static final int PREEMPTED_MEMORY_SECONDS_FIELD_NUMBER = 10;
  private long preemptedMemorySeconds_ = 0L;
  /**
   * optional int64 preempted_memory_seconds = 10;
   * @return Whether the preemptedMemorySeconds field is set.
   */
  @java.lang.Override
  public boolean hasPreemptedMemorySeconds() {
    return ((bitField0_ & 0x00000200) != 0);
  }
  /**
   * optional int64 preempted_memory_seconds = 10;
   * @return The preemptedMemorySeconds.
   */
  @java.lang.Override
  public long getPreemptedMemorySeconds() {
    return preemptedMemorySeconds_;
  }

  public static final int PREEMPTED_VCORE_SECONDS_FIELD_NUMBER = 11;
  private long preemptedVcoreSeconds_ = 0L;
  /**
   * optional int64 preempted_vcore_seconds = 11;
   * @return Whether the preemptedVcoreSeconds field is set.
   */
  @java.lang.Override
  public boolean hasPreemptedVcoreSeconds() {
    return ((bitField0_ & 0x00000400) != 0);
  }
  /**
   * optional int64 preempted_vcore_seconds = 11;
   * @return The preemptedVcoreSeconds.
   */
  @java.lang.Override
  public long getPreemptedVcoreSeconds() {
    return preemptedVcoreSeconds_;
  }

  public static final int APPLICATION_RESOURCE_USAGE_MAP_FIELD_NUMBER = 12;
  @SuppressWarnings("serial")
  private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> applicationResourceUsageMap_;
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;
   */
  @java.lang.Override
  public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> getApplicationResourceUsageMapList() {
    return applicationResourceUsageMap_;
  }
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;
   */
  @java.lang.Override
  public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder> getApplicationResourceUsageMapOrBuilderList() {
    return applicationResourceUsageMap_;
  }
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;
   */
  @java.lang.Override
  public int getApplicationResourceUsageMapCount() {
    return applicationResourceUsageMap_.size();
  }
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getApplicationResourceUsageMap(int index) {
    return applicationResourceUsageMap_.get(index);
  }
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder getApplicationResourceUsageMapOrBuilder(
      int index) {
    return applicationResourceUsageMap_.get(index);
  }

  public static final int APPLICATION_PREEMPTED_RESOURCE_USAGE_MAP_FIELD_NUMBER = 13;
  @SuppressWarnings("serial")
  private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> applicationPreemptedResourceUsageMap_;
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;
   */
  @java.lang.Override
  public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> getApplicationPreemptedResourceUsageMapList() {
    return applicationPreemptedResourceUsageMap_;
  }
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;
   */
  @java.lang.Override
  public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder> getApplicationPreemptedResourceUsageMapOrBuilderList() {
    return applicationPreemptedResourceUsageMap_;
  }
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;
   */
  @java.lang.Override
  public int getApplicationPreemptedResourceUsageMapCount() {
    return applicationPreemptedResourceUsageMap_.size();
  }
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getApplicationPreemptedResourceUsageMap(int index) {
    return applicationPreemptedResourceUsageMap_.get(index);
  }
  /**
   * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder getApplicationPreemptedResourceUsageMapOrBuilder(
      int index) {
    return applicationPreemptedResourceUsageMap_.get(index);
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    if (hasUsedResources()) {
      if (!getUsedResources().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
    }
    if (hasReservedResources()) {
      if (!getReservedResources().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
    }
    if (hasNeededResources()) {
      if (!getNeededResources().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
    }
    for (int i = 0; i < getApplicationResourceUsageMapCount(); i++) {
      if (!getApplicationResourceUsageMap(i).isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
    }
    for (int i = 0; i < getApplicationPreemptedResourceUsageMapCount(); i++) {
      if (!getApplicationPreemptedResourceUsageMap(i).isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
    }
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeInt32(1, numUsedContainers_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeInt32(2, numReservedContainers_);
    }
    if (((bitField0_ & 0x00000004) != 0)) {
      output.writeMessage(3, getUsedResources());
    }
    if (((bitField0_ & 0x00000008) != 0)) {
      output.writeMessage(4, getReservedResources());
    }
    if (((bitField0_ & 0x00000010) != 0)) {
      output.writeMessage(5, getNeededResources());
    }
    if (((bitField0_ & 0x00000020) != 0)) {
      output.writeInt64(6, memorySeconds_);
    }
    if (((bitField0_ & 0x00000040) != 0)) {
      output.writeInt64(7, vcoreSeconds_);
    }
    if (((bitField0_ & 0x00000080) != 0)) {
      output.writeFloat(8, queueUsagePercentage_);
    }
    if (((bitField0_ & 0x00000100) != 0)) {
      output.writeFloat(9, clusterUsagePercentage_);
    }
    if (((bitField0_ & 0x00000200) != 0)) {
      output.writeInt64(10, preemptedMemorySeconds_);
    }
    if (((bitField0_ & 0x00000400) != 0)) {
      output.writeInt64(11, preemptedVcoreSeconds_);
    }
    for (int i = 0; i < applicationResourceUsageMap_.size(); i++) {
      output.writeMessage(12, applicationResourceUsageMap_.get(i));
    }
    for (int i = 0; i < applicationPreemptedResourceUsageMap_.size(); i++) {
      output.writeMessage(13, applicationPreemptedResourceUsageMap_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeInt32Size(1, numUsedContainers_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeInt32Size(2, numReservedContainers_);
    }
    if (((bitField0_ & 0x00000004) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeMessageSize(3, getUsedResources());
    }
    if (((bitField0_ & 0x00000008) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeMessageSize(4, getReservedResources());
    }
    if (((bitField0_ & 0x00000010) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeMessageSize(5, getNeededResources());
    }
    if (((bitField0_ & 0x00000020) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeInt64Size(6, memorySeconds_);
    }
    if (((bitField0_ & 0x00000040) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeInt64Size(7, vcoreSeconds_);
    }
    if (((bitField0_ & 0x00000080) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeFloatSize(8, queueUsagePercentage_);
    }
    if (((bitField0_ & 0x00000100) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeFloatSize(9, clusterUsagePercentage_);
    }
    if (((bitField0_ & 0x00000200) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeInt64Size(10, preemptedMemorySeconds_);
    }
    if (((bitField0_ & 0x00000400) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeInt64Size(11, preemptedVcoreSeconds_);
    }
    for (int i = 0; i < applicationResourceUsageMap_.size(); i++) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeMessageSize(12, applicationResourceUsageMap_.get(i));
    }
    for (int i = 0; i < applicationPreemptedResourceUsageMap_.size(); i++) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
        .computeMessageSize(13, applicationPreemptedResourceUsageMap_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto)) {
      return super.equals(obj);
    }
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto) obj;

    if (hasNumUsedContainers() != other.hasNumUsedContainers()) return false;
    if (hasNumUsedContainers()) {
      if (getNumUsedContainers()
          != other.getNumUsedContainers()) return false;
    }
    if (hasNumReservedContainers() != other.hasNumReservedContainers()) return false;
    if (hasNumReservedContainers()) {
      if (getNumReservedContainers()
          != other.getNumReservedContainers()) return false;
    }
    if (hasUsedResources() != other.hasUsedResources()) return false;
    if (hasUsedResources()) {
      if (!getUsedResources()
          .equals(other.getUsedResources())) return false;
    }
    if (hasReservedResources() != other.hasReservedResources()) return false;
    if (hasReservedResources()) {
      if (!getReservedResources()
          .equals(other.getReservedResources())) return false;
    }
    if (hasNeededResources() != other.hasNeededResources()) return false;
    if (hasNeededResources()) {
      if (!getNeededResources()
          .equals(other.getNeededResources())) return false;
    }
    if (hasMemorySeconds() != other.hasMemorySeconds()) return false;
    if (hasMemorySeconds()) {
      if (getMemorySeconds()
          != other.getMemorySeconds()) return false;
    }
    if (hasVcoreSeconds() != other.hasVcoreSeconds()) return false;
    if (hasVcoreSeconds()) {
      if (getVcoreSeconds()
          != other.getVcoreSeconds()) return false;
    }
    if (hasQueueUsagePercentage() != other.hasQueueUsagePercentage()) return false;
    if (hasQueueUsagePercentage()) {
      if (java.lang.Float.floatToIntBits(getQueueUsagePercentage())
          != java.lang.Float.floatToIntBits(
              other.getQueueUsagePercentage())) return false;
    }
    if (hasClusterUsagePercentage() != other.hasClusterUsagePercentage()) return false;
    if (hasClusterUsagePercentage()) {
      if (java.lang.Float.floatToIntBits(getClusterUsagePercentage())
          != java.lang.Float.floatToIntBits(
              other.getClusterUsagePercentage())) return false;
    }
    if (hasPreemptedMemorySeconds() != other.hasPreemptedMemorySeconds()) return false;
    if (hasPreemptedMemorySeconds()) {
      if (getPreemptedMemorySeconds()
          != other.getPreemptedMemorySeconds()) return false;
    }
    if (hasPreemptedVcoreSeconds() != other.hasPreemptedVcoreSeconds()) return false;
    if (hasPreemptedVcoreSeconds()) {
      if (getPreemptedVcoreSeconds()
          != other.getPreemptedVcoreSeconds()) return false;
    }
    if (!getApplicationResourceUsageMapList()
        .equals(other.getApplicationResourceUsageMapList())) return false;
    if (!getApplicationPreemptedResourceUsageMapList()
        .equals(other.getApplicationPreemptedResourceUsageMapList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasNumUsedContainers()) {
      hash = (37 * hash) + NUM_USED_CONTAINERS_FIELD_NUMBER;
      hash = (53 * hash) + getNumUsedContainers();
    }
    if (hasNumReservedContainers()) {
      hash = (37 * hash) + NUM_RESERVED_CONTAINERS_FIELD_NUMBER;
      hash = (53 * hash) + getNumReservedContainers();
    }
    if (hasUsedResources()) {
      hash = (37 * hash) + USED_RESOURCES_FIELD_NUMBER;
      hash = (53 * hash) + getUsedResources().hashCode();
    }
    if (hasReservedResources()) {
      hash = (37 * hash) + RESERVED_RESOURCES_FIELD_NUMBER;
      hash = (53 * hash) + getReservedResources().hashCode();
    }
    if (hasNeededResources()) {
      hash = (37 * hash) + NEEDED_RESOURCES_FIELD_NUMBER;
      hash = (53 * hash) + getNeededResources().hashCode();
    }
    if (hasMemorySeconds()) {
      hash = (37 * hash) + MEMORY_SECONDS_FIELD_NUMBER;
      hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
          getMemorySeconds());
    }
    if (hasVcoreSeconds()) {
      hash = (37 * hash) + VCORE_SECONDS_FIELD_NUMBER;
      hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
          getVcoreSeconds());
    }
    if (hasQueueUsagePercentage()) {
      hash = (37 * hash) + QUEUE_USAGE_PERCENTAGE_FIELD_NUMBER;
      hash = (53 * hash) + java.lang.Float.floatToIntBits(
          getQueueUsagePercentage());
    }
    if (hasClusterUsagePercentage()) {
      hash = (37 * hash) + CLUSTER_USAGE_PERCENTAGE_FIELD_NUMBER;
      hash = (53 * hash) + java.lang.Float.floatToIntBits(
          getClusterUsagePercentage());
    }
    if (hasPreemptedMemorySeconds()) {
      hash = (37 * hash) + PREEMPTED_MEMORY_SECONDS_FIELD_NUMBER;
      hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
          getPreemptedMemorySeconds());
    }
    if (hasPreemptedVcoreSeconds()) {
      hash = (37 * hash) + PREEMPTED_VCORE_SECONDS_FIELD_NUMBER;
      hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
          getPreemptedVcoreSeconds());
    }
    if (getApplicationResourceUsageMapCount() > 0) {
      hash = (37 * hash) + APPLICATION_RESOURCE_USAGE_MAP_FIELD_NUMBER;
      hash = (53 * hash) + getApplicationResourceUsageMapList().hashCode();
    }
    if (getApplicationPreemptedResourceUsageMapCount() > 0) {
      hash = (37 * hash) + APPLICATION_PREEMPTED_RESOURCE_USAGE_MAP_FIELD_NUMBER;
      hash = (53 * hash) + getApplicationPreemptedResourceUsageMapList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(
      java.nio.ByteBuffer data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(
      java.nio.ByteBuffer data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(byte[] data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(
      byte[] data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
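  // Illustrative usage sketch (hand-written, not protoc output): draining a
  // stream of length-delimited reports, e.g. one written with
  // writeDelimitedTo(OutputStream). parseDelimitedFrom(...) returns null on a
  // clean end of stream, which terminates the loop.
  private static java.util.List<ApplicationResourceUsageReportProto> readAllReports(
      java.io.InputStream in) throws java.io.IOException {
    java.util.List<ApplicationResourceUsageReportProto> reports =
        new java.util.ArrayList<ApplicationResourceUsageReportProto>();
    ApplicationResourceUsageReportProto report;
    while ((report = ApplicationResourceUsageReportProto.parseDelimitedFrom(in)) != null) {
      reports.add(report);  // each element was framed with its own length prefix
    }
    return reports;
  }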
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ApplicationResourceUsageReportProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationResourceUsageReportProto) org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUsedResourcesFieldBuilder(); getReservedResourcesFieldBuilder(); getNeededResourcesFieldBuilder(); 
getApplicationResourceUsageMapFieldBuilder(); getApplicationPreemptedResourceUsageMapFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; numUsedContainers_ = 0; numReservedContainers_ = 0; usedResources_ = null; if (usedResourcesBuilder_ != null) { usedResourcesBuilder_.dispose(); usedResourcesBuilder_ = null; } reservedResources_ = null; if (reservedResourcesBuilder_ != null) { reservedResourcesBuilder_.dispose(); reservedResourcesBuilder_ = null; } neededResources_ = null; if (neededResourcesBuilder_ != null) { neededResourcesBuilder_.dispose(); neededResourcesBuilder_ = null; } memorySeconds_ = 0L; vcoreSeconds_ = 0L; queueUsagePercentage_ = 0F; clusterUsagePercentage_ = 0F; preemptedMemorySeconds_ = 0L; preemptedVcoreSeconds_ = 0L; if (applicationResourceUsageMapBuilder_ == null) { applicationResourceUsageMap_ = java.util.Collections.emptyList(); } else { applicationResourceUsageMap_ = null; applicationResourceUsageMapBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000800); if (applicationPreemptedResourceUsageMapBuilder_ == null) { applicationPreemptedResourceUsageMap_ = java.util.Collections.emptyList(); } else { applicationPreemptedResourceUsageMap_ = null; applicationPreemptedResourceUsageMapBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00001000); return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto result) { if (applicationResourceUsageMapBuilder_ == null) { if (((bitField0_ & 0x00000800) != 0)) { applicationResourceUsageMap_ = java.util.Collections.unmodifiableList(applicationResourceUsageMap_); bitField0_ = (bitField0_ & ~0x00000800); } result.applicationResourceUsageMap_ = applicationResourceUsageMap_; } else { result.applicationResourceUsageMap_ = applicationResourceUsageMapBuilder_.build(); } if (applicationPreemptedResourceUsageMapBuilder_ == null) { if (((bitField0_ & 0x00001000) != 0)) { applicationPreemptedResourceUsageMap_ = java.util.Collections.unmodifiableList(applicationPreemptedResourceUsageMap_); bitField0_ = (bitField0_ & ~0x00001000); } result.applicationPreemptedResourceUsageMap_ = applicationPreemptedResourceUsageMap_; } else { 
result.applicationPreemptedResourceUsageMap_ = applicationPreemptedResourceUsageMapBuilder_.build(); } } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.numUsedContainers_ = numUsedContainers_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.numReservedContainers_ = numReservedContainers_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.usedResources_ = usedResourcesBuilder_ == null ? usedResources_ : usedResourcesBuilder_.build(); to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.reservedResources_ = reservedResourcesBuilder_ == null ? reservedResources_ : reservedResourcesBuilder_.build(); to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.neededResources_ = neededResourcesBuilder_ == null ? neededResources_ : neededResourcesBuilder_.build(); to_bitField0_ |= 0x00000010; } if (((from_bitField0_ & 0x00000020) != 0)) { result.memorySeconds_ = memorySeconds_; to_bitField0_ |= 0x00000020; } if (((from_bitField0_ & 0x00000040) != 0)) { result.vcoreSeconds_ = vcoreSeconds_; to_bitField0_ |= 0x00000040; } if (((from_bitField0_ & 0x00000080) != 0)) { result.queueUsagePercentage_ = queueUsagePercentage_; to_bitField0_ |= 0x00000080; } if (((from_bitField0_ & 0x00000100) != 0)) { result.clusterUsagePercentage_ = clusterUsagePercentage_; to_bitField0_ |= 0x00000100; } if (((from_bitField0_ & 0x00000200) != 0)) { result.preemptedMemorySeconds_ = preemptedMemorySeconds_; to_bitField0_ |= 0x00000200; } if (((from_bitField0_ & 0x00000400) != 0)) { result.preemptedVcoreSeconds_ = preemptedVcoreSeconds_; to_bitField0_ |= 0x00000400; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.getDefaultInstance()) return 
this; if (other.hasNumUsedContainers()) { setNumUsedContainers(other.getNumUsedContainers()); } if (other.hasNumReservedContainers()) { setNumReservedContainers(other.getNumReservedContainers()); } if (other.hasUsedResources()) { mergeUsedResources(other.getUsedResources()); } if (other.hasReservedResources()) { mergeReservedResources(other.getReservedResources()); } if (other.hasNeededResources()) { mergeNeededResources(other.getNeededResources()); } if (other.hasMemorySeconds()) { setMemorySeconds(other.getMemorySeconds()); } if (other.hasVcoreSeconds()) { setVcoreSeconds(other.getVcoreSeconds()); } if (other.hasQueueUsagePercentage()) { setQueueUsagePercentage(other.getQueueUsagePercentage()); } if (other.hasClusterUsagePercentage()) { setClusterUsagePercentage(other.getClusterUsagePercentage()); } if (other.hasPreemptedMemorySeconds()) { setPreemptedMemorySeconds(other.getPreemptedMemorySeconds()); } if (other.hasPreemptedVcoreSeconds()) { setPreemptedVcoreSeconds(other.getPreemptedVcoreSeconds()); } if (applicationResourceUsageMapBuilder_ == null) { if (!other.applicationResourceUsageMap_.isEmpty()) { if (applicationResourceUsageMap_.isEmpty()) { applicationResourceUsageMap_ = other.applicationResourceUsageMap_; bitField0_ = (bitField0_ & ~0x00000800); } else { ensureApplicationResourceUsageMapIsMutable(); applicationResourceUsageMap_.addAll(other.applicationResourceUsageMap_); } onChanged(); } } else { if (!other.applicationResourceUsageMap_.isEmpty()) { if (applicationResourceUsageMapBuilder_.isEmpty()) { applicationResourceUsageMapBuilder_.dispose(); applicationResourceUsageMapBuilder_ = null; applicationResourceUsageMap_ = other.applicationResourceUsageMap_; bitField0_ = (bitField0_ & ~0x00000800); applicationResourceUsageMapBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getApplicationResourceUsageMapFieldBuilder() : null; } else { applicationResourceUsageMapBuilder_.addAllMessages(other.applicationResourceUsageMap_); } } } if (applicationPreemptedResourceUsageMapBuilder_ == null) { if (!other.applicationPreemptedResourceUsageMap_.isEmpty()) { if (applicationPreemptedResourceUsageMap_.isEmpty()) { applicationPreemptedResourceUsageMap_ = other.applicationPreemptedResourceUsageMap_; bitField0_ = (bitField0_ & ~0x00001000); } else { ensureApplicationPreemptedResourceUsageMapIsMutable(); applicationPreemptedResourceUsageMap_.addAll(other.applicationPreemptedResourceUsageMap_); } onChanged(); } } else { if (!other.applicationPreemptedResourceUsageMap_.isEmpty()) { if (applicationPreemptedResourceUsageMapBuilder_.isEmpty()) { applicationPreemptedResourceUsageMapBuilder_.dispose(); applicationPreemptedResourceUsageMapBuilder_ = null; applicationPreemptedResourceUsageMap_ = other.applicationPreemptedResourceUsageMap_; bitField0_ = (bitField0_ & ~0x00001000); applicationPreemptedResourceUsageMapBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
              getApplicationPreemptedResourceUsageMapFieldBuilder() : null;
          } else {
            applicationPreemptedResourceUsageMapBuilder_.addAllMessages(other.applicationPreemptedResourceUsageMap_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      if (hasUsedResources()) {
        if (!getUsedResources().isInitialized()) {
          return false;
        }
      }
      if (hasReservedResources()) {
        if (!getReservedResources().isInitialized()) {
          return false;
        }
      }
      if (hasNeededResources()) {
        if (!getNeededResources().isInitialized()) {
          return false;
        }
      }
      for (int i = 0; i < getApplicationResourceUsageMapCount(); i++) {
        if (!getApplicationResourceUsageMap(i).isInitialized()) {
          return false;
        }
      }
      for (int i = 0; i < getApplicationPreemptedResourceUsageMapCount(); i++) {
        if (!getApplicationPreemptedResourceUsageMap(i).isInitialized()) {
          return false;
        }
      }
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Each tag is (field_number << 3) | wire_type, so e.g. 8 is field 1
          // as a varint and 26 is field 3 as a length-delimited message.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: {
              numUsedContainers_ = input.readInt32();
              bitField0_ |= 0x00000001;
              break;
            } // case 8
            case 16: {
              numReservedContainers_ = input.readInt32();
              bitField0_ |= 0x00000002;
              break;
            } // case 16
            case 26: {
              input.readMessage(
                  getUsedResourcesFieldBuilder().getBuilder(),
                  extensionRegistry);
              bitField0_ |= 0x00000004;
              break;
            } // case 26
            case 34: {
              input.readMessage(
                  getReservedResourcesFieldBuilder().getBuilder(),
                  extensionRegistry);
              bitField0_ |= 0x00000008;
              break;
            } // case 34
            case 42: {
              input.readMessage(
                  getNeededResourcesFieldBuilder().getBuilder(),
                  extensionRegistry);
              bitField0_ |= 0x00000010;
              break;
            } // case 42
            case 48: {
              memorySeconds_ = input.readInt64();
              bitField0_ |= 0x00000020;
              break;
            } // case 48
            case 56: {
              vcoreSeconds_ = input.readInt64();
              bitField0_ |= 0x00000040;
              break;
            } // case 56
            case 69: {
              queueUsagePercentage_ = input.readFloat();
              bitField0_ |= 0x00000080;
              break;
            } // case 69
            case 77: {
              clusterUsagePercentage_ = input.readFloat();
              bitField0_ |= 0x00000100;
              break;
            } // case 77
            case 80: {
              preemptedMemorySeconds_ = input.readInt64();
              bitField0_ |= 0x00000200;
              break;
            } // case 80
            case 88: {
              preemptedVcoreSeconds_ = input.readInt64();
              bitField0_ |= 0x00000400;
              break;
            } // case 88
            case 98: {
              org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto m =
                  input.readMessage(
                      org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.PARSER,
                      extensionRegistry);
              if (applicationResourceUsageMapBuilder_ == null) {
                ensureApplicationResourceUsageMapIsMutable();
                applicationResourceUsageMap_.add(m);
              } else {
                applicationResourceUsageMapBuilder_.addMessage(m);
              }
              break;
            } // case 98
            case 106: {
              org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto m =
                  input.readMessage(
                      org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.PARSER,
                      extensionRegistry);
              if (applicationPreemptedResourceUsageMapBuilder_ == null) {
                ensureApplicationPreemptedResourceUsageMapIsMutable();
                applicationPreemptedResourceUsageMap_.add(m);
              } else {
                applicationPreemptedResourceUsageMapBuilder_.addMessage(m);
              }
              break;
            } // case 106
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private int numUsedContainers_ ;
    /** optional int32 num_used_containers = 1; @return Whether the numUsedContainers field is set. */
    @java.lang.Override
    public boolean hasNumUsedContainers() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /** optional int32 num_used_containers = 1; @return The numUsedContainers. */
    @java.lang.Override
    public int getNumUsedContainers() {
      return numUsedContainers_;
    }
    /** optional int32 num_used_containers = 1; @param value The numUsedContainers to set. @return This builder for chaining. */
    public Builder setNumUsedContainers(int value) {
      numUsedContainers_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /** optional int32 num_used_containers = 1; @return This builder for chaining. */
    public Builder clearNumUsedContainers() {
      bitField0_ = (bitField0_ & ~0x00000001);
      numUsedContainers_ = 0;
      onChanged();
      return this;
    }

    private int numReservedContainers_ ;
    /** optional int32 num_reserved_containers = 2; @return Whether the numReservedContainers field is set. */
    @java.lang.Override
    public boolean hasNumReservedContainers() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /** optional int32 num_reserved_containers = 2; @return The numReservedContainers. */
    @java.lang.Override
    public int getNumReservedContainers() {
      return numReservedContainers_;
    }
    /** optional int32 num_reserved_containers = 2; @param value The numReservedContainers to set. @return This builder for chaining. */
    public Builder setNumReservedContainers(int value) {
      numReservedContainers_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /** optional int32 num_reserved_containers = 2; @return This builder for chaining. */
    public Builder clearNumReservedContainers() {
      bitField0_ = (bitField0_ & ~0x00000002);
      numReservedContainers_ = 0;
      onChanged();
      return this;
    }

    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto usedResources_;
    private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> usedResourcesBuilder_;
    /** optional .hadoop.yarn.ResourceProto used_resources = 3; @return Whether the usedResources field is set. */
    public boolean hasUsedResources() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /** optional .hadoop.yarn.ResourceProto used_resources = 3; @return The usedResources. */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getUsedResources() {
      if (usedResourcesBuilder_ == null) {
        return usedResources_ == null ?
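            // NOTE (added commentary, not generated code): every optional
            // scalar above follows the same has/get/set/clear template backed
            // by one bit of bitField0_. Sketch:
            //
            //   Builder b = YarnProtos.ApplicationResourceUsageReportProto.newBuilder();
            //   b.setNumUsedContainers(7);
            //   b.hasNumUsedContainers();   // true
            //   b.clearNumUsedContainers(); // drops the bit, restores default 0
            //   b.hasNumUsedContainers();   // false, though getNumUsedContainers() == 0 either way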
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : usedResources_; } else { return usedResourcesBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceProto used_resources = 3; */ public Builder setUsedResources(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (usedResourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } usedResources_ = value; } else { usedResourcesBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto used_resources = 3; */ public Builder setUsedResources( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) { if (usedResourcesBuilder_ == null) { usedResources_ = builderForValue.build(); } else { usedResourcesBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto used_resources = 3; */ public Builder mergeUsedResources(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (usedResourcesBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && usedResources_ != null && usedResources_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) { getUsedResourcesBuilder().mergeFrom(value); } else { usedResources_ = value; } } else { usedResourcesBuilder_.mergeFrom(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto used_resources = 3; */ public Builder clearUsedResources() { bitField0_ = (bitField0_ & ~0x00000004); usedResources_ = null; if (usedResourcesBuilder_ != null) { usedResourcesBuilder_.dispose(); usedResourcesBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto used_resources = 3; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getUsedResourcesBuilder() { bitField0_ |= 0x00000004; onChanged(); return getUsedResourcesFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceProto used_resources = 3; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getUsedResourcesOrBuilder() { if (usedResourcesBuilder_ != null) { return usedResourcesBuilder_.getMessageOrBuilder(); } else { return usedResources_ == null ? 
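            // NOTE (added commentary, not generated code): for message-typed
            // fields such as used_resources, setUsedResources(...) replaces
            // the value outright while mergeUsedResources(...) above folds the
            // argument into any value already present, field by field. Sketch,
            // assuming ResourceProto's usual generated setters:
            //
            //   builder.setUsedResources(
            //       YarnProtos.ResourceProto.newBuilder().setMemory(2048L).build());
            //   builder.mergeUsedResources(
            //       YarnProtos.ResourceProto.newBuilder().setVirtualCores(2).build());
            //   // used_resources now carries both memory and virtual_cores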
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : usedResources_; } } /** * optional .hadoop.yarn.ResourceProto used_resources = 3; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getUsedResourcesFieldBuilder() { if (usedResourcesBuilder_ == null) { usedResourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>( getUsedResources(), getParentForChildren(), isClean()); usedResources_ = null; } return usedResourcesBuilder_; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto reservedResources_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> reservedResourcesBuilder_; /** * optional .hadoop.yarn.ResourceProto reserved_resources = 4; * @return Whether the reservedResources field is set. */ public boolean hasReservedResources() { return ((bitField0_ & 0x00000008) != 0); } /** * optional .hadoop.yarn.ResourceProto reserved_resources = 4; * @return The reservedResources. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getReservedResources() { if (reservedResourcesBuilder_ == null) { return reservedResources_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : reservedResources_; } else { return reservedResourcesBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceProto reserved_resources = 4; */ public Builder setReservedResources(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (reservedResourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } reservedResources_ = value; } else { reservedResourcesBuilder_.setMessage(value); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto reserved_resources = 4; */ public Builder setReservedResources( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) { if (reservedResourcesBuilder_ == null) { reservedResources_ = builderForValue.build(); } else { reservedResourcesBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto reserved_resources = 4; */ public Builder mergeReservedResources(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (reservedResourcesBuilder_ == null) { if (((bitField0_ & 0x00000008) != 0) && reservedResources_ != null && reservedResources_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) { getReservedResourcesBuilder().mergeFrom(value); } else { reservedResources_ = value; } } else { reservedResourcesBuilder_.mergeFrom(value); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto reserved_resources = 4; */ public Builder clearReservedResources() { bitField0_ = (bitField0_ & ~0x00000008); reservedResources_ = null; if (reservedResourcesBuilder_ != null) { reservedResourcesBuilder_.dispose(); reservedResourcesBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto reserved_resources = 4; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getReservedResourcesBuilder() { bitField0_ |= 0x00000008; onChanged(); return getReservedResourcesFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceProto reserved_resources = 4; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getReservedResourcesOrBuilder() { if (reservedResourcesBuilder_ != null) { return reservedResourcesBuilder_.getMessageOrBuilder(); } else { return reservedResources_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : reservedResources_; } } /** * optional .hadoop.yarn.ResourceProto reserved_resources = 4; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getReservedResourcesFieldBuilder() { if (reservedResourcesBuilder_ == null) { reservedResourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>( getReservedResources(), getParentForChildren(), isClean()); reservedResources_ = null; } return reservedResourcesBuilder_; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto neededResources_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> neededResourcesBuilder_; /** * optional .hadoop.yarn.ResourceProto needed_resources = 5; * @return Whether the neededResources field is set. */ public boolean hasNeededResources() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.ResourceProto needed_resources = 5; * @return The neededResources. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getNeededResources() { if (neededResourcesBuilder_ == null) { return neededResources_ == null ? 
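            // NOTE (added commentary, not generated code): used_resources,
            // reserved_resources and needed_resources (fields 3-5) all share
            // the SingleFieldBuilderV3 template: the nested builder is created
            // lazily on first access, after which reads and writes go through
            // it rather than the plain field. Sketch:
            //
            //   builder.getNeededResourcesBuilder()  // materializes the nested builder
            //          .setMemory(1024L);            // assumed ResourceProto setter
            //   builder.getNeededResources();        // reflects the in-place edit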
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : neededResources_; } else { return neededResourcesBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceProto needed_resources = 5; */ public Builder setNeededResources(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (neededResourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } neededResources_ = value; } else { neededResourcesBuilder_.setMessage(value); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto needed_resources = 5; */ public Builder setNeededResources( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) { if (neededResourcesBuilder_ == null) { neededResources_ = builderForValue.build(); } else { neededResourcesBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto needed_resources = 5; */ public Builder mergeNeededResources(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (neededResourcesBuilder_ == null) { if (((bitField0_ & 0x00000010) != 0) && neededResources_ != null && neededResources_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) { getNeededResourcesBuilder().mergeFrom(value); } else { neededResources_ = value; } } else { neededResourcesBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto needed_resources = 5; */ public Builder clearNeededResources() { bitField0_ = (bitField0_ & ~0x00000010); neededResources_ = null; if (neededResourcesBuilder_ != null) { neededResourcesBuilder_.dispose(); neededResourcesBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto needed_resources = 5; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getNeededResourcesBuilder() { bitField0_ |= 0x00000010; onChanged(); return getNeededResourcesFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceProto needed_resources = 5; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getNeededResourcesOrBuilder() { if (neededResourcesBuilder_ != null) { return neededResourcesBuilder_.getMessageOrBuilder(); } else { return neededResources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : neededResources_; } } /** * optional .hadoop.yarn.ResourceProto needed_resources = 5; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getNeededResourcesFieldBuilder() { if (neededResourcesBuilder_ == null) { neededResourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>( getNeededResources(), getParentForChildren(), isClean()); neededResources_ = null; } return neededResourcesBuilder_; } private long memorySeconds_ ; /** * optional int64 memory_seconds = 6; * @return Whether the memorySeconds field is set. 
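     * <p>Note (added commentary, not generated): in YARN's
     * ApplicationResourceUsageReport this field accumulates megabytes of
     * memory multiplied by seconds of runtime, so a container holding
     * 2048 MB for 10 seconds contributes 20480; vcore_seconds below
     * accumulates vcores x seconds the same way.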
*/ @java.lang.Override public boolean hasMemorySeconds() { return ((bitField0_ & 0x00000020) != 0); } /** * optional int64 memory_seconds = 6; * @return The memorySeconds. */ @java.lang.Override public long getMemorySeconds() { return memorySeconds_; } /** * optional int64 memory_seconds = 6; * @param value The memorySeconds to set. * @return This builder for chaining. */ public Builder setMemorySeconds(long value) { memorySeconds_ = value; bitField0_ |= 0x00000020; onChanged(); return this; } /** * optional int64 memory_seconds = 6; * @return This builder for chaining. */ public Builder clearMemorySeconds() { bitField0_ = (bitField0_ & ~0x00000020); memorySeconds_ = 0L; onChanged(); return this; } private long vcoreSeconds_ ; /** * optional int64 vcore_seconds = 7; * @return Whether the vcoreSeconds field is set. */ @java.lang.Override public boolean hasVcoreSeconds() { return ((bitField0_ & 0x00000040) != 0); } /** * optional int64 vcore_seconds = 7; * @return The vcoreSeconds. */ @java.lang.Override public long getVcoreSeconds() { return vcoreSeconds_; } /** * optional int64 vcore_seconds = 7; * @param value The vcoreSeconds to set. * @return This builder for chaining. */ public Builder setVcoreSeconds(long value) { vcoreSeconds_ = value; bitField0_ |= 0x00000040; onChanged(); return this; } /** * optional int64 vcore_seconds = 7; * @return This builder for chaining. */ public Builder clearVcoreSeconds() { bitField0_ = (bitField0_ & ~0x00000040); vcoreSeconds_ = 0L; onChanged(); return this; } private float queueUsagePercentage_ ; /** * optional float queue_usage_percentage = 8; * @return Whether the queueUsagePercentage field is set. */ @java.lang.Override public boolean hasQueueUsagePercentage() { return ((bitField0_ & 0x00000080) != 0); } /** * optional float queue_usage_percentage = 8; * @return The queueUsagePercentage. */ @java.lang.Override public float getQueueUsagePercentage() { return queueUsagePercentage_; } /** * optional float queue_usage_percentage = 8; * @param value The queueUsagePercentage to set. * @return This builder for chaining. */ public Builder setQueueUsagePercentage(float value) { queueUsagePercentage_ = value; bitField0_ |= 0x00000080; onChanged(); return this; } /** * optional float queue_usage_percentage = 8; * @return This builder for chaining. */ public Builder clearQueueUsagePercentage() { bitField0_ = (bitField0_ & ~0x00000080); queueUsagePercentage_ = 0F; onChanged(); return this; } private float clusterUsagePercentage_ ; /** * optional float cluster_usage_percentage = 9; * @return Whether the clusterUsagePercentage field is set. */ @java.lang.Override public boolean hasClusterUsagePercentage() { return ((bitField0_ & 0x00000100) != 0); } /** * optional float cluster_usage_percentage = 9; * @return The clusterUsagePercentage. */ @java.lang.Override public float getClusterUsagePercentage() { return clusterUsagePercentage_; } /** * optional float cluster_usage_percentage = 9; * @param value The clusterUsagePercentage to set. * @return This builder for chaining. */ public Builder setClusterUsagePercentage(float value) { clusterUsagePercentage_ = value; bitField0_ |= 0x00000100; onChanged(); return this; } /** * optional float cluster_usage_percentage = 9; * @return This builder for chaining. 
*/ public Builder clearClusterUsagePercentage() { bitField0_ = (bitField0_ & ~0x00000100); clusterUsagePercentage_ = 0F; onChanged(); return this; } private long preemptedMemorySeconds_ ; /** * optional int64 preempted_memory_seconds = 10; * @return Whether the preemptedMemorySeconds field is set. */ @java.lang.Override public boolean hasPreemptedMemorySeconds() { return ((bitField0_ & 0x00000200) != 0); } /** * optional int64 preempted_memory_seconds = 10; * @return The preemptedMemorySeconds. */ @java.lang.Override public long getPreemptedMemorySeconds() { return preemptedMemorySeconds_; } /** * optional int64 preempted_memory_seconds = 10; * @param value The preemptedMemorySeconds to set. * @return This builder for chaining. */ public Builder setPreemptedMemorySeconds(long value) { preemptedMemorySeconds_ = value; bitField0_ |= 0x00000200; onChanged(); return this; } /** * optional int64 preempted_memory_seconds = 10; * @return This builder for chaining. */ public Builder clearPreemptedMemorySeconds() { bitField0_ = (bitField0_ & ~0x00000200); preemptedMemorySeconds_ = 0L; onChanged(); return this; } private long preemptedVcoreSeconds_ ; /** * optional int64 preempted_vcore_seconds = 11; * @return Whether the preemptedVcoreSeconds field is set. */ @java.lang.Override public boolean hasPreemptedVcoreSeconds() { return ((bitField0_ & 0x00000400) != 0); } /** * optional int64 preempted_vcore_seconds = 11; * @return The preemptedVcoreSeconds. */ @java.lang.Override public long getPreemptedVcoreSeconds() { return preemptedVcoreSeconds_; } /** * optional int64 preempted_vcore_seconds = 11; * @param value The preemptedVcoreSeconds to set. * @return This builder for chaining. */ public Builder setPreemptedVcoreSeconds(long value) { preemptedVcoreSeconds_ = value; bitField0_ |= 0x00000400; onChanged(); return this; } /** * optional int64 preempted_vcore_seconds = 11; * @return This builder for chaining. 
*/ public Builder clearPreemptedVcoreSeconds() { bitField0_ = (bitField0_ & ~0x00000400); preemptedVcoreSeconds_ = 0L; onChanged(); return this; } private java.util.List applicationResourceUsageMap_ = java.util.Collections.emptyList(); private void ensureApplicationResourceUsageMapIsMutable() { if (!((bitField0_ & 0x00000800) != 0)) { applicationResourceUsageMap_ = new java.util.ArrayList(applicationResourceUsageMap_); bitField0_ |= 0x00000800; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder> applicationResourceUsageMapBuilder_; /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public java.util.List getApplicationResourceUsageMapList() { if (applicationResourceUsageMapBuilder_ == null) { return java.util.Collections.unmodifiableList(applicationResourceUsageMap_); } else { return applicationResourceUsageMapBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public int getApplicationResourceUsageMapCount() { if (applicationResourceUsageMapBuilder_ == null) { return applicationResourceUsageMap_.size(); } else { return applicationResourceUsageMapBuilder_.getCount(); } } /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getApplicationResourceUsageMap(int index) { if (applicationResourceUsageMapBuilder_ == null) { return applicationResourceUsageMap_.get(index); } else { return applicationResourceUsageMapBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public Builder setApplicationResourceUsageMap( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto value) { if (applicationResourceUsageMapBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationResourceUsageMapIsMutable(); applicationResourceUsageMap_.set(index, value); onChanged(); } else { applicationResourceUsageMapBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public Builder setApplicationResourceUsageMap( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder builderForValue) { if (applicationResourceUsageMapBuilder_ == null) { ensureApplicationResourceUsageMapIsMutable(); applicationResourceUsageMap_.set(index, builderForValue.build()); onChanged(); } else { applicationResourceUsageMapBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public Builder addApplicationResourceUsageMap(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto value) { if (applicationResourceUsageMapBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationResourceUsageMapIsMutable(); applicationResourceUsageMap_.add(value); onChanged(); } else { applicationResourceUsageMapBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public Builder addApplicationResourceUsageMap( int index, 
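        // NOTE (added commentary, not generated code):
        // application_resource_usage_map is a repeated message field, so the
        // builder exposes the full list API (add/set/remove/clear/addAll),
        // switching to RepeatedFieldBuilderV3 once builders are in play.
        // Sketch, assuming StringLongMapProto's generated key/value setters:
        //
        //   builder.addApplicationResourceUsageMap(
        //       YarnProtos.StringLongMapProto.newBuilder()
        //           .setKey("memory-mb").setValue(8192L).build());
        //   builder.getApplicationResourceUsageMapCount(); // 1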
org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto value) { if (applicationResourceUsageMapBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationResourceUsageMapIsMutable(); applicationResourceUsageMap_.add(index, value); onChanged(); } else { applicationResourceUsageMapBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public Builder addApplicationResourceUsageMap( org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder builderForValue) { if (applicationResourceUsageMapBuilder_ == null) { ensureApplicationResourceUsageMapIsMutable(); applicationResourceUsageMap_.add(builderForValue.build()); onChanged(); } else { applicationResourceUsageMapBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public Builder addApplicationResourceUsageMap( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder builderForValue) { if (applicationResourceUsageMapBuilder_ == null) { ensureApplicationResourceUsageMapIsMutable(); applicationResourceUsageMap_.add(index, builderForValue.build()); onChanged(); } else { applicationResourceUsageMapBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public Builder addAllApplicationResourceUsageMap( java.lang.Iterable values) { if (applicationResourceUsageMapBuilder_ == null) { ensureApplicationResourceUsageMapIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, applicationResourceUsageMap_); onChanged(); } else { applicationResourceUsageMapBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public Builder clearApplicationResourceUsageMap() { if (applicationResourceUsageMapBuilder_ == null) { applicationResourceUsageMap_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000800); onChanged(); } else { applicationResourceUsageMapBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public Builder removeApplicationResourceUsageMap(int index) { if (applicationResourceUsageMapBuilder_ == null) { ensureApplicationResourceUsageMapIsMutable(); applicationResourceUsageMap_.remove(index); onChanged(); } else { applicationResourceUsageMapBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder getApplicationResourceUsageMapBuilder( int index) { return getApplicationResourceUsageMapFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder getApplicationResourceUsageMapOrBuilder( int index) { if (applicationResourceUsageMapBuilder_ == null) { return applicationResourceUsageMap_.get(index); } else { return applicationResourceUsageMapBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public java.util.List getApplicationResourceUsageMapOrBuilderList() { if (applicationResourceUsageMapBuilder_ != null) { 
return applicationResourceUsageMapBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(applicationResourceUsageMap_); } } /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder addApplicationResourceUsageMapBuilder() { return getApplicationResourceUsageMapFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder addApplicationResourceUsageMapBuilder( int index) { return getApplicationResourceUsageMapFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12; */ public java.util.List getApplicationResourceUsageMapBuilderList() { return getApplicationResourceUsageMapFieldBuilder().getBuilderList(); } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder> getApplicationResourceUsageMapFieldBuilder() { if (applicationResourceUsageMapBuilder_ == null) { applicationResourceUsageMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder>( applicationResourceUsageMap_, ((bitField0_ & 0x00000800) != 0), getParentForChildren(), isClean()); applicationResourceUsageMap_ = null; } return applicationResourceUsageMapBuilder_; } private java.util.List applicationPreemptedResourceUsageMap_ = java.util.Collections.emptyList(); private void ensureApplicationPreemptedResourceUsageMapIsMutable() { if (!((bitField0_ & 0x00001000) != 0)) { applicationPreemptedResourceUsageMap_ = new java.util.ArrayList(applicationPreemptedResourceUsageMap_); bitField0_ |= 0x00001000; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder> applicationPreemptedResourceUsageMapBuilder_; /** * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */ public java.util.List getApplicationPreemptedResourceUsageMapList() { if (applicationPreemptedResourceUsageMapBuilder_ == null) { return java.util.Collections.unmodifiableList(applicationPreemptedResourceUsageMap_); } else { return applicationPreemptedResourceUsageMapBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */ public int getApplicationPreemptedResourceUsageMapCount() { if (applicationPreemptedResourceUsageMapBuilder_ == null) { return applicationPreemptedResourceUsageMap_.size(); } else { return applicationPreemptedResourceUsageMapBuilder_.getCount(); } } /** * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */ 
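    // NOTE (added commentary, not generated code): fields 12 and 13 emulate a
    // string -> long map with repeated StringLongMapProto entries rather than
    // a first-class map field. A caller can rebuild the map view, assuming
    // the entry message's generated getKey()/getValue():
    //
    //   java.util.Map<String, Long> usage = new java.util.HashMap<>();
    //   for (YarnProtos.StringLongMapProto e :
    //       report.getApplicationPreemptedResourceUsageMapList()) {
    //     usage.put(e.getKey(), e.getValue());
    //   }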
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getApplicationPreemptedResourceUsageMap(int index) { if (applicationPreemptedResourceUsageMapBuilder_ == null) { return applicationPreemptedResourceUsageMap_.get(index); } else { return applicationPreemptedResourceUsageMapBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */ public Builder setApplicationPreemptedResourceUsageMap( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto value) { if (applicationPreemptedResourceUsageMapBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationPreemptedResourceUsageMapIsMutable(); applicationPreemptedResourceUsageMap_.set(index, value); onChanged(); } else { applicationPreemptedResourceUsageMapBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */ public Builder setApplicationPreemptedResourceUsageMap( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder builderForValue) { if (applicationPreemptedResourceUsageMapBuilder_ == null) { ensureApplicationPreemptedResourceUsageMapIsMutable(); applicationPreemptedResourceUsageMap_.set(index, builderForValue.build()); onChanged(); } else { applicationPreemptedResourceUsageMapBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */ public Builder addApplicationPreemptedResourceUsageMap(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto value) { if (applicationPreemptedResourceUsageMapBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationPreemptedResourceUsageMapIsMutable(); applicationPreemptedResourceUsageMap_.add(value); onChanged(); } else { applicationPreemptedResourceUsageMapBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */ public Builder addApplicationPreemptedResourceUsageMap( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto value) { if (applicationPreemptedResourceUsageMapBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationPreemptedResourceUsageMapIsMutable(); applicationPreemptedResourceUsageMap_.add(index, value); onChanged(); } else { applicationPreemptedResourceUsageMapBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */ public Builder addApplicationPreemptedResourceUsageMap( org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder builderForValue) { if (applicationPreemptedResourceUsageMapBuilder_ == null) { ensureApplicationPreemptedResourceUsageMapIsMutable(); applicationPreemptedResourceUsageMap_.add(builderForValue.build()); onChanged(); } else { applicationPreemptedResourceUsageMapBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */ public Builder addApplicationPreemptedResourceUsageMap( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder builderForValue) { if (applicationPreemptedResourceUsageMapBuilder_ == null) { ensureApplicationPreemptedResourceUsageMapIsMutable(); 
applicationPreemptedResourceUsageMap_.add(index, builderForValue.build()); onChanged(); } else { applicationPreemptedResourceUsageMapBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */ public Builder addAllApplicationPreemptedResourceUsageMap( java.lang.Iterable values) { if (applicationPreemptedResourceUsageMapBuilder_ == null) { ensureApplicationPreemptedResourceUsageMapIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, applicationPreemptedResourceUsageMap_); onChanged(); } else { applicationPreemptedResourceUsageMapBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */ public Builder clearApplicationPreemptedResourceUsageMap() { if (applicationPreemptedResourceUsageMapBuilder_ == null) { applicationPreemptedResourceUsageMap_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00001000); onChanged(); } else { applicationPreemptedResourceUsageMapBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */ public Builder removeApplicationPreemptedResourceUsageMap(int index) { if (applicationPreemptedResourceUsageMapBuilder_ == null) { ensureApplicationPreemptedResourceUsageMapIsMutable(); applicationPreemptedResourceUsageMap_.remove(index); onChanged(); } else { applicationPreemptedResourceUsageMapBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder getApplicationPreemptedResourceUsageMapBuilder( int index) { return getApplicationPreemptedResourceUsageMapFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder getApplicationPreemptedResourceUsageMapOrBuilder( int index) { if (applicationPreemptedResourceUsageMapBuilder_ == null) { return applicationPreemptedResourceUsageMap_.get(index); } else { return applicationPreemptedResourceUsageMapBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */ public java.util.List getApplicationPreemptedResourceUsageMapOrBuilderList() { if (applicationPreemptedResourceUsageMapBuilder_ != null) { return applicationPreemptedResourceUsageMapBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(applicationPreemptedResourceUsageMap_); } } /** * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder addApplicationPreemptedResourceUsageMapBuilder() { return getApplicationPreemptedResourceUsageMapFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder addApplicationPreemptedResourceUsageMapBuilder( int index) { return getApplicationPreemptedResourceUsageMapFieldBuilder().addBuilder( 
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.getDefaultInstance());
    }
    /** repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13; */
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder>
         getApplicationPreemptedResourceUsageMapBuilderList() {
      return getApplicationPreemptedResourceUsageMapFieldBuilder().getBuilderList();
    }
    private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder>
        getApplicationPreemptedResourceUsageMapFieldBuilder() {
      if (applicationPreemptedResourceUsageMapBuilder_ == null) {
        applicationPreemptedResourceUsageMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
            org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder>(
                applicationPreemptedResourceUsageMap_,
                ((bitField0_ & 0x00001000) != 0),
                getParentForChildren(),
                isClean());
        applicationPreemptedResourceUsageMap_ = null;
      }
      return applicationPreemptedResourceUsageMapBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationResourceUsageReportProto)
  }

  // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationResourceUsageReportProto)
  private static final org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto();
  }

  public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationResourceUsageReportProto>
      PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationResourceUsageReportProto>() {
    @java.lang.Override
    public ApplicationResourceUsageReportProto parsePartialFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationResourceUsageReportProto> parser() {
    return PARSER;
  }

  @java.lang.Override
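  // NOTE (added commentary, not generated code): PARSER above drives all
  // deserialization for this message; parsePartialFrom() funnels every failure
  // into InvalidProtocolBufferException with the partially built message
  // attached. Round-trip sketch using the standard generated surface:
  //
  //   byte[] bytes = report.toByteArray();
  //   YarnProtos.ApplicationResourceUsageReportProto decoded =
  //       YarnProtos.ApplicationResourceUsageReportProto.parser().parseFrom(bytes);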
  public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationResourceUsageReportProto> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

  }

  public interface ApplicationReportProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationReportProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /** optional .hadoop.yarn.ApplicationIdProto applicationId = 1; @return Whether the applicationId field is set. */
    boolean hasApplicationId();
    /** optional .hadoop.yarn.ApplicationIdProto applicationId = 1; @return The applicationId. */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
    /** optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();

    /** optional string user = 2; @return Whether the user field is set. */
    boolean hasUser();
    /** optional string user = 2; @return The user. */
    java.lang.String getUser();
    /** optional string user = 2; @return The bytes for user. */
    org.apache.hadoop.thirdparty.protobuf.ByteString getUserBytes();

    /** optional string queue = 3; @return Whether the queue field is set. */
    boolean hasQueue();
    /** optional string queue = 3; @return The queue. */
    java.lang.String getQueue();
    /** optional string queue = 3; @return The bytes for queue. */
    org.apache.hadoop.thirdparty.protobuf.ByteString getQueueBytes();

    /** optional string name = 4; @return Whether the name field is set. */
    boolean hasName();
    /** optional string name = 4; @return The name. */
    java.lang.String getName();
    /** optional string name = 4; @return The bytes for name. */
    org.apache.hadoop.thirdparty.protobuf.ByteString getNameBytes();

    /** optional string host = 5; @return Whether the host field is set. */
    boolean hasHost();
    /** optional string host = 5; @return The host. */
    java.lang.String getHost();
    /** optional string host = 5; @return The bytes for host. */
    org.apache.hadoop.thirdparty.protobuf.ByteString getHostBytes();

    /** optional int32 rpc_port = 6; @return Whether the rpcPort field is set. */
    boolean hasRpcPort();
    /** optional int32 rpc_port = 6; @return The rpcPort. */
    int getRpcPort();

    /** optional .hadoop.common.TokenProto client_to_am_token = 7; @return Whether the clientToAmToken field is set. */
    boolean hasClientToAmToken();
    /** optional .hadoop.common.TokenProto client_to_am_token = 7; @return The clientToAmToken. */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProto getClientToAmToken();
    /** optional .hadoop.common.TokenProto client_to_am_token = 7; */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getClientToAmTokenOrBuilder();

    /** optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 8; @return Whether the yarnApplicationState field is set. */
    boolean hasYarnApplicationState();
    /** optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 8; @return The yarnApplicationState. */
    org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getYarnApplicationState();

    /** optional string trackingUrl = 9; @return Whether the trackingUrl field is set. */
    boolean hasTrackingUrl();
    /**
     * optional string trackingUrl = 9;
     * @return The trackingUrl.
*/ java.lang.String getTrackingUrl(); /** * optional string trackingUrl = 9; * @return The bytes for trackingUrl. */ org.apache.hadoop.thirdparty.protobuf.ByteString getTrackingUrlBytes(); /** * optional string diagnostics = 10 [default = "N/A"]; * @return Whether the diagnostics field is set. */ boolean hasDiagnostics(); /** * optional string diagnostics = 10 [default = "N/A"]; * @return The diagnostics. */ java.lang.String getDiagnostics(); /** * optional string diagnostics = 10 [default = "N/A"]; * @return The bytes for diagnostics. */ org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsBytes(); /** * optional int64 startTime = 11; * @return Whether the startTime field is set. */ boolean hasStartTime(); /** * optional int64 startTime = 11; * @return The startTime. */ long getStartTime(); /** * optional int64 finishTime = 12; * @return Whether the finishTime field is set. */ boolean hasFinishTime(); /** * optional int64 finishTime = 12; * @return The finishTime. */ long getFinishTime(); /** * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 13; * @return Whether the finalApplicationStatus field is set. */ boolean hasFinalApplicationStatus(); /** * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 13; * @return The finalApplicationStatus. */ org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus(); /** * optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14; * @return Whether the appResourceUsage field is set. */ boolean hasAppResourceUsage(); /** * optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14; * @return The appResourceUsage. */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto getAppResourceUsage(); /** * optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProtoOrBuilder getAppResourceUsageOrBuilder(); /** * optional string originalTrackingUrl = 15; * @return Whether the originalTrackingUrl field is set. */ boolean hasOriginalTrackingUrl(); /** * optional string originalTrackingUrl = 15; * @return The originalTrackingUrl. */ java.lang.String getOriginalTrackingUrl(); /** * optional string originalTrackingUrl = 15; * @return The bytes for originalTrackingUrl. */ org.apache.hadoop.thirdparty.protobuf.ByteString getOriginalTrackingUrlBytes(); /** * optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16; * @return Whether the currentApplicationAttemptId field is set. */ boolean hasCurrentApplicationAttemptId(); /** * optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16; * @return The currentApplicationAttemptId. */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getCurrentApplicationAttemptId(); /** * optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getCurrentApplicationAttemptIdOrBuilder(); /** * optional float progress = 17; * @return Whether the progress field is set. */ boolean hasProgress(); /** * optional float progress = 17; * @return The progress. */ float getProgress(); /** * optional string applicationType = 18; * @return Whether the applicationType field is set. */ boolean hasApplicationType(); /** * optional string applicationType = 18; * @return The applicationType. 
*/ java.lang.String getApplicationType(); /** * optional string applicationType = 18; * @return The bytes for applicationType. */ org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationTypeBytes(); /** * optional .hadoop.common.TokenProto am_rm_token = 19; * @return Whether the amRmToken field is set. */ boolean hasAmRmToken(); /** * optional .hadoop.common.TokenProto am_rm_token = 19; * @return The amRmToken. */ org.apache.hadoop.security.proto.SecurityProtos.TokenProto getAmRmToken(); /** * optional .hadoop.common.TokenProto am_rm_token = 19; */ org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getAmRmTokenOrBuilder(); /** * repeated string applicationTags = 20; * @return A list containing the applicationTags. */ java.util.List getApplicationTagsList(); /** * repeated string applicationTags = 20; * @return The count of applicationTags. */ int getApplicationTagsCount(); /** * repeated string applicationTags = 20; * @param index The index of the element to return. * @return The applicationTags at the given index. */ java.lang.String getApplicationTags(int index); /** * repeated string applicationTags = 20; * @param index The index of the value to return. * @return The bytes of the applicationTags at the given index. */ org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationTagsBytes(int index); /** * optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 21; * @return Whether the logAggregationStatus field is set. */ boolean hasLogAggregationStatus(); /** * optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 21; * @return The logAggregationStatus. */ org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto getLogAggregationStatus(); /** * optional bool unmanaged_application = 22 [default = false]; * @return Whether the unmanagedApplication field is set. */ boolean hasUnmanagedApplication(); /** * optional bool unmanaged_application = 22 [default = false]; * @return The unmanagedApplication. */ boolean getUnmanagedApplication(); /** * optional .hadoop.yarn.PriorityProto priority = 23; * @return Whether the priority field is set. */ boolean hasPriority(); /** * optional .hadoop.yarn.PriorityProto priority = 23; * @return The priority. */ org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority(); /** * optional .hadoop.yarn.PriorityProto priority = 23; */ org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder(); /** * optional string appNodeLabelExpression = 24; * @return Whether the appNodeLabelExpression field is set. */ boolean hasAppNodeLabelExpression(); /** * optional string appNodeLabelExpression = 24; * @return The appNodeLabelExpression. */ java.lang.String getAppNodeLabelExpression(); /** * optional string appNodeLabelExpression = 24; * @return The bytes for appNodeLabelExpression. */ org.apache.hadoop.thirdparty.protobuf.ByteString getAppNodeLabelExpressionBytes(); /** * optional string amNodeLabelExpression = 25; * @return Whether the amNodeLabelExpression field is set. */ boolean hasAmNodeLabelExpression(); /** * optional string amNodeLabelExpression = 25; * @return The amNodeLabelExpression. */ java.lang.String getAmNodeLabelExpression(); /** * optional string amNodeLabelExpression = 25; * @return The bytes for amNodeLabelExpression. 
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString getAmNodeLabelExpressionBytes();

    /** repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26; */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto> getAppTimeoutsList();
    /** repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26; */
    org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto getAppTimeouts(int index);
    /** repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26; */
    int getAppTimeoutsCount();
    /** repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26; */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder> getAppTimeoutsOrBuilderList();
    /** repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26; */
    org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder getAppTimeoutsOrBuilder(
        int index);

    /** optional int64 launchTime = 27; @return Whether the launchTime field is set. */
    boolean hasLaunchTime();
    /** optional int64 launchTime = 27; @return The launchTime. */
    long getLaunchTime();

    /** optional int64 submitTime = 28; @return Whether the submitTime field is set. */
    boolean hasSubmitTime();
    /** optional int64 submitTime = 28; @return The submitTime. */
    long getSubmitTime();

    /** optional string rmClusterId = 29; @return Whether the rmClusterId field is set. */
    boolean hasRmClusterId();
    /** optional string rmClusterId = 29; @return The rmClusterId. */
    java.lang.String getRmClusterId();
    /** optional string rmClusterId = 29; @return The bytes for rmClusterId. */
    org.apache.hadoop.thirdparty.protobuf.ByteString getRmClusterIdBytes();
  }

  /**
   * Protobuf type {@code hadoop.yarn.ApplicationReportProto}
   */
  public static final class ApplicationReportProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationReportProto)
      ApplicationReportProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ApplicationReportProto.newBuilder() to construct.
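    // NOTE (added commentary, not generated code): a minimal construction
    // sketch for this message, assuming the generated setters that match the
    // getters declared in ApplicationReportProtoOrBuilder above:
    //
    //   YarnProtos.ApplicationReportProto report =
    //       YarnProtos.ApplicationReportProto.newBuilder()
    //           .setUser("alice")
    //           .setQueue("default")
    //           .setName("wordcount")
    //           .setProgress(0.5f)
    //           .build();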
    private ApplicationReportProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ApplicationReportProto() {
      user_ = "";
      queue_ = "";
      name_ = "";
      host_ = "";
      yarnApplicationState_ = 1;
      trackingUrl_ = "";
      diagnostics_ = "N/A";
      finalApplicationStatus_ = 0;
      originalTrackingUrl_ = "";
      applicationType_ = "";
      applicationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
      logAggregationStatus_ = 1;
      appNodeLabelExpression_ = "";
      amNodeLabelExpression_ = "";
      appTimeouts_ = java.util.Collections.emptyList();
      rmClusterId_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ApplicationReportProto();
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationReportProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationReportProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATIONID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return Whether the applicationId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return The applicationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }

    public static final int USER_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object user_ = "";
    /**
     * <code>optional string user = 2;</code>
     * @return Whether the user field is set.
     */
    @java.lang.Override
    public boolean hasUser() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string user = 2;</code>
     * @return The user.
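     * <p>Editorial note (sketch, not generated code): string fields keep either a
     * decoded {@code java.lang.String} or the raw UTF-8 {@code ByteString} in a
     * single {@code java.lang.Object} slot; whichever representation is requested
     * first is converted once and cached back into {@code user_}. A hypothetical
     * caller therefore pays the UTF-8 decode only on the first access:
     * <pre>
     *   if (report.hasUser()) {
     *     java.lang.String first = report.getUser();   // decodes and caches
     *     java.lang.String again = report.getUser();   // returns the cached String
     *   }
     * </pre>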
*/ @java.lang.Override public java.lang.String getUser() { java.lang.Object ref = user_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { user_ = s; } return s; } } /** * optional string user = 2; * @return The bytes for user. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getUserBytes() { java.lang.Object ref = user_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); user_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int QUEUE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object queue_ = ""; /** * optional string queue = 3; * @return Whether the queue field is set. */ @java.lang.Override public boolean hasQueue() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string queue = 3; * @return The queue. */ @java.lang.Override public java.lang.String getQueue() { java.lang.Object ref = queue_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { queue_ = s; } return s; } } /** * optional string queue = 3; * @return The bytes for queue. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getQueueBytes() { java.lang.Object ref = queue_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); queue_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int NAME_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * optional string name = 4; * @return Whether the name field is set. */ @java.lang.Override public boolean hasName() { return ((bitField0_ & 0x00000008) != 0); } /** * optional string name = 4; * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * optional string name = 4; * @return The bytes for name. 
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int HOST_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object host_ = ""; /** * optional string host = 5; * @return Whether the host field is set. */ @java.lang.Override public boolean hasHost() { return ((bitField0_ & 0x00000010) != 0); } /** * optional string host = 5; * @return The host. */ @java.lang.Override public java.lang.String getHost() { java.lang.Object ref = host_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { host_ = s; } return s; } } /** * optional string host = 5; * @return The bytes for host. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getHostBytes() { java.lang.Object ref = host_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); host_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int RPC_PORT_FIELD_NUMBER = 6; private int rpcPort_ = 0; /** * optional int32 rpc_port = 6; * @return Whether the rpcPort field is set. */ @java.lang.Override public boolean hasRpcPort() { return ((bitField0_ & 0x00000020) != 0); } /** * optional int32 rpc_port = 6; * @return The rpcPort. */ @java.lang.Override public int getRpcPort() { return rpcPort_; } public static final int CLIENT_TO_AM_TOKEN_FIELD_NUMBER = 7; private org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto clientToAmToken_; /** * optional .hadoop.common.TokenProto client_to_am_token = 7; * @return Whether the clientToAmToken field is set. */ @java.lang.Override public boolean hasClientToAmToken() { return ((bitField0_ & 0x00000040) != 0); } /** * optional .hadoop.common.TokenProto client_to_am_token = 7; * @return The clientToAmToken. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto getClientToAmToken() { return clientToAmToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : clientToAmToken_; } /** * optional .hadoop.common.TokenProto client_to_am_token = 7; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getClientToAmTokenOrBuilder() { return clientToAmToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : clientToAmToken_; } public static final int YARN_APPLICATION_STATE_FIELD_NUMBER = 8; private int yarnApplicationState_ = 1; /** * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 8; * @return Whether the yarnApplicationState field is set. 
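     * <p>Editorial note: enum fields are persisted as raw ints
     * ({@code yarnApplicationState_ = 1}); the getter that follows resolves the
     * stored number via {@code YarnApplicationStateProto.forNumber(int)} and falls
     * back to {@code NEW} when the number is unknown, e.g. a state added by a
     * newer schema than this build understands.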
*/ @java.lang.Override public boolean hasYarnApplicationState() { return ((bitField0_ & 0x00000080) != 0); } /** * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 8; * @return The yarnApplicationState. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getYarnApplicationState() { org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.forNumber(yarnApplicationState_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.NEW : result; } public static final int TRACKINGURL_FIELD_NUMBER = 9; @SuppressWarnings("serial") private volatile java.lang.Object trackingUrl_ = ""; /** * optional string trackingUrl = 9; * @return Whether the trackingUrl field is set. */ @java.lang.Override public boolean hasTrackingUrl() { return ((bitField0_ & 0x00000100) != 0); } /** * optional string trackingUrl = 9; * @return The trackingUrl. */ @java.lang.Override public java.lang.String getTrackingUrl() { java.lang.Object ref = trackingUrl_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { trackingUrl_ = s; } return s; } } /** * optional string trackingUrl = 9; * @return The bytes for trackingUrl. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getTrackingUrlBytes() { java.lang.Object ref = trackingUrl_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); trackingUrl_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int DIAGNOSTICS_FIELD_NUMBER = 10; @SuppressWarnings("serial") private volatile java.lang.Object diagnostics_ = "N/A"; /** * optional string diagnostics = 10 [default = "N/A"]; * @return Whether the diagnostics field is set. */ @java.lang.Override public boolean hasDiagnostics() { return ((bitField0_ & 0x00000200) != 0); } /** * optional string diagnostics = 10 [default = "N/A"]; * @return The diagnostics. */ @java.lang.Override public java.lang.String getDiagnostics() { java.lang.Object ref = diagnostics_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { diagnostics_ = s; } return s; } } /** * optional string diagnostics = 10 [default = "N/A"]; * @return The bytes for diagnostics. 
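     * <p>Editorial note: {@code diagnostics} is declared with
     * {@code [default = "N/A"]}, so when the field is unset {@code getDiagnostics()}
     * returns "N/A" rather than the empty string; the default is baked into the
     * {@code diagnostics_ = "N/A"} initializer above.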
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsBytes() { java.lang.Object ref = diagnostics_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); diagnostics_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int STARTTIME_FIELD_NUMBER = 11; private long startTime_ = 0L; /** * optional int64 startTime = 11; * @return Whether the startTime field is set. */ @java.lang.Override public boolean hasStartTime() { return ((bitField0_ & 0x00000400) != 0); } /** * optional int64 startTime = 11; * @return The startTime. */ @java.lang.Override public long getStartTime() { return startTime_; } public static final int FINISHTIME_FIELD_NUMBER = 12; private long finishTime_ = 0L; /** * optional int64 finishTime = 12; * @return Whether the finishTime field is set. */ @java.lang.Override public boolean hasFinishTime() { return ((bitField0_ & 0x00000800) != 0); } /** * optional int64 finishTime = 12; * @return The finishTime. */ @java.lang.Override public long getFinishTime() { return finishTime_; } public static final int FINAL_APPLICATION_STATUS_FIELD_NUMBER = 13; private int finalApplicationStatus_ = 0; /** * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 13; * @return Whether the finalApplicationStatus field is set. */ @java.lang.Override public boolean hasFinalApplicationStatus() { return ((bitField0_ & 0x00001000) != 0); } /** * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 13; * @return The finalApplicationStatus. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus() { org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.forNumber(finalApplicationStatus_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.APP_UNDEFINED : result; } public static final int APP_RESOURCE_USAGE_FIELD_NUMBER = 14; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto appResourceUsage_; /** * optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14; * @return Whether the appResourceUsage field is set. */ @java.lang.Override public boolean hasAppResourceUsage() { return ((bitField0_ & 0x00002000) != 0); } /** * optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14; * @return The appResourceUsage. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto getAppResourceUsage() { return appResourceUsage_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.getDefaultInstance() : appResourceUsage_; } /** * optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProtoOrBuilder getAppResourceUsageOrBuilder() { return appResourceUsage_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.getDefaultInstance() : appResourceUsage_; } public static final int ORIGINALTRACKINGURL_FIELD_NUMBER = 15; @SuppressWarnings("serial") private volatile java.lang.Object originalTrackingUrl_ = ""; /** * optional string originalTrackingUrl = 15; * @return Whether the originalTrackingUrl field is set. */ @java.lang.Override public boolean hasOriginalTrackingUrl() { return ((bitField0_ & 0x00004000) != 0); } /** * optional string originalTrackingUrl = 15; * @return The originalTrackingUrl. */ @java.lang.Override public java.lang.String getOriginalTrackingUrl() { java.lang.Object ref = originalTrackingUrl_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { originalTrackingUrl_ = s; } return s; } } /** * optional string originalTrackingUrl = 15; * @return The bytes for originalTrackingUrl. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getOriginalTrackingUrlBytes() { java.lang.Object ref = originalTrackingUrl_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); originalTrackingUrl_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int CURRENTAPPLICATIONATTEMPTID_FIELD_NUMBER = 16; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto currentApplicationAttemptId_; /** * optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16; * @return Whether the currentApplicationAttemptId field is set. */ @java.lang.Override public boolean hasCurrentApplicationAttemptId() { return ((bitField0_ & 0x00008000) != 0); } /** * optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16; * @return The currentApplicationAttemptId. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getCurrentApplicationAttemptId() { return currentApplicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : currentApplicationAttemptId_; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getCurrentApplicationAttemptIdOrBuilder() { return currentApplicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : currentApplicationAttemptId_; } public static final int PROGRESS_FIELD_NUMBER = 17; private float progress_ = 0F; /** * optional float progress = 17; * @return Whether the progress field is set. */ @java.lang.Override public boolean hasProgress() { return ((bitField0_ & 0x00010000) != 0); } /** * optional float progress = 17; * @return The progress. 
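     * <p>Editorial note: scalar fields such as this one are stored unboxed
     * ({@code float progress_}), so an unset field is indistinguishable from
     * {@code 0F} by value alone; callers should consult {@code hasProgress()}
     * before interpreting the result.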
*/ @java.lang.Override public float getProgress() { return progress_; } public static final int APPLICATIONTYPE_FIELD_NUMBER = 18; @SuppressWarnings("serial") private volatile java.lang.Object applicationType_ = ""; /** * optional string applicationType = 18; * @return Whether the applicationType field is set. */ @java.lang.Override public boolean hasApplicationType() { return ((bitField0_ & 0x00020000) != 0); } /** * optional string applicationType = 18; * @return The applicationType. */ @java.lang.Override public java.lang.String getApplicationType() { java.lang.Object ref = applicationType_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { applicationType_ = s; } return s; } } /** * optional string applicationType = 18; * @return The bytes for applicationType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationTypeBytes() { java.lang.Object ref = applicationType_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); applicationType_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int AM_RM_TOKEN_FIELD_NUMBER = 19; private org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto amRmToken_; /** * optional .hadoop.common.TokenProto am_rm_token = 19; * @return Whether the amRmToken field is set. */ @java.lang.Override public boolean hasAmRmToken() { return ((bitField0_ & 0x00040000) != 0); } /** * optional .hadoop.common.TokenProto am_rm_token = 19; * @return The amRmToken. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto getAmRmToken() { return amRmToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : amRmToken_; } /** * optional .hadoop.common.TokenProto am_rm_token = 19; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getAmRmTokenOrBuilder() { return amRmToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : amRmToken_; } public static final int APPLICATIONTAGS_FIELD_NUMBER = 20; @SuppressWarnings("serial") private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList applicationTags_; /** * repeated string applicationTags = 20; * @return A list containing the applicationTags. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getApplicationTagsList() { return applicationTags_; } /** * repeated string applicationTags = 20; * @return The count of applicationTags. */ public int getApplicationTagsCount() { return applicationTags_.size(); } /** * repeated string applicationTags = 20; * @param index The index of the element to return. * @return The applicationTags at the given index. */ public java.lang.String getApplicationTags(int index) { return applicationTags_.get(index); } /** * repeated string applicationTags = 20; * @param index The index of the value to return. * @return The bytes of the applicationTags at the given index. 
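     * <p>Editorial sketch (hypothetical caller): repeated string fields expose a
     * {@code ProtocolStringList}, so the tags can be iterated directly:
     * <pre>
     *   for (java.lang.String tag : report.getApplicationTagsList()) {
     *     System.out.println(tag);
     *   }
     * </pre>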
*/ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationTagsBytes(int index) { return applicationTags_.getByteString(index); } public static final int LOG_AGGREGATION_STATUS_FIELD_NUMBER = 21; private int logAggregationStatus_ = 1; /** * optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 21; * @return Whether the logAggregationStatus field is set. */ @java.lang.Override public boolean hasLogAggregationStatus() { return ((bitField0_ & 0x00080000) != 0); } /** * optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 21; * @return The logAggregationStatus. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto getLogAggregationStatus() { org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto.forNumber(logAggregationStatus_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto.LOG_DISABLED : result; } public static final int UNMANAGED_APPLICATION_FIELD_NUMBER = 22; private boolean unmanagedApplication_ = false; /** * optional bool unmanaged_application = 22 [default = false]; * @return Whether the unmanagedApplication field is set. */ @java.lang.Override public boolean hasUnmanagedApplication() { return ((bitField0_ & 0x00100000) != 0); } /** * optional bool unmanaged_application = 22 [default = false]; * @return The unmanagedApplication. */ @java.lang.Override public boolean getUnmanagedApplication() { return unmanagedApplication_; } public static final int PRIORITY_FIELD_NUMBER = 23; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_; /** * optional .hadoop.yarn.PriorityProto priority = 23; * @return Whether the priority field is set. */ @java.lang.Override public boolean hasPriority() { return ((bitField0_ & 0x00200000) != 0); } /** * optional .hadoop.yarn.PriorityProto priority = 23; * @return The priority. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() { return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } /** * optional .hadoop.yarn.PriorityProto priority = 23; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() { return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } public static final int APPNODELABELEXPRESSION_FIELD_NUMBER = 24; @SuppressWarnings("serial") private volatile java.lang.Object appNodeLabelExpression_ = ""; /** * optional string appNodeLabelExpression = 24; * @return Whether the appNodeLabelExpression field is set. */ @java.lang.Override public boolean hasAppNodeLabelExpression() { return ((bitField0_ & 0x00400000) != 0); } /** * optional string appNodeLabelExpression = 24; * @return The appNodeLabelExpression. 
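     * <p>Editorial note: message-typed getters in this class, such as
     * {@code getPriority()} above, never return null; when the field is unset they
     * return the type's {@code getDefaultInstance()}, which is why
     * {@code hasPriority()} is the only reliable presence check.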
     */
    @java.lang.Override
    public java.lang.String getAppNodeLabelExpression() {
      java.lang.Object ref = appNodeLabelExpression_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs =
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          appNodeLabelExpression_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string appNodeLabelExpression = 24;</code>
     * @return The bytes for appNodeLabelExpression.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getAppNodeLabelExpressionBytes() {
      java.lang.Object ref = appNodeLabelExpression_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b =
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        appNodeLabelExpression_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int AMNODELABELEXPRESSION_FIELD_NUMBER = 25;
    @SuppressWarnings("serial")
    private volatile java.lang.Object amNodeLabelExpression_ = "";
    /**
     * <code>optional string amNodeLabelExpression = 25;</code>
     * @return Whether the amNodeLabelExpression field is set.
     */
    @java.lang.Override
    public boolean hasAmNodeLabelExpression() {
      return ((bitField0_ & 0x00800000) != 0);
    }
    /**
     * <code>optional string amNodeLabelExpression = 25;</code>
     * @return The amNodeLabelExpression.
     */
    @java.lang.Override
    public java.lang.String getAmNodeLabelExpression() {
      java.lang.Object ref = amNodeLabelExpression_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs =
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          amNodeLabelExpression_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string amNodeLabelExpression = 25;</code>
     * @return The bytes for amNodeLabelExpression.
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getAmNodeLabelExpressionBytes() { java.lang.Object ref = amNodeLabelExpression_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); amNodeLabelExpression_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int APPTIMEOUTS_FIELD_NUMBER = 26; @SuppressWarnings("serial") private java.util.List appTimeouts_; /** * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26; */ @java.lang.Override public java.util.List getAppTimeoutsList() { return appTimeouts_; } /** * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26; */ @java.lang.Override public java.util.List getAppTimeoutsOrBuilderList() { return appTimeouts_; } /** * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26; */ @java.lang.Override public int getAppTimeoutsCount() { return appTimeouts_.size(); } /** * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto getAppTimeouts(int index) { return appTimeouts_.get(index); } /** * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder getAppTimeoutsOrBuilder( int index) { return appTimeouts_.get(index); } public static final int LAUNCHTIME_FIELD_NUMBER = 27; private long launchTime_ = 0L; /** * optional int64 launchTime = 27; * @return Whether the launchTime field is set. */ @java.lang.Override public boolean hasLaunchTime() { return ((bitField0_ & 0x01000000) != 0); } /** * optional int64 launchTime = 27; * @return The launchTime. */ @java.lang.Override public long getLaunchTime() { return launchTime_; } public static final int SUBMITTIME_FIELD_NUMBER = 28; private long submitTime_ = 0L; /** * optional int64 submitTime = 28; * @return Whether the submitTime field is set. */ @java.lang.Override public boolean hasSubmitTime() { return ((bitField0_ & 0x02000000) != 0); } /** * optional int64 submitTime = 28; * @return The submitTime. */ @java.lang.Override public long getSubmitTime() { return submitTime_; } public static final int RMCLUSTERID_FIELD_NUMBER = 29; @SuppressWarnings("serial") private volatile java.lang.Object rmClusterId_ = ""; /** * optional string rmClusterId = 29; * @return Whether the rmClusterId field is set. */ @java.lang.Override public boolean hasRmClusterId() { return ((bitField0_ & 0x04000000) != 0); } /** * optional string rmClusterId = 29; * @return The rmClusterId. */ @java.lang.Override public java.lang.String getRmClusterId() { java.lang.Object ref = rmClusterId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { rmClusterId_ = s; } return s; } } /** * optional string rmClusterId = 29; * @return The bytes for rmClusterId. 
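     * <p>Editorial sketch (hypothetical caller): the repeated message field
     * {@code appTimeouts} above pairs indexed access with a list view:
     * <pre>
     *   for (org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto t
     *       : report.getAppTimeoutsList()) {
     *     // inspect each per-type timeout entry
     *   }
     * </pre>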
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getRmClusterIdBytes() { java.lang.Object ref = rmClusterId_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); rmClusterId_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasClientToAmToken()) { if (!getClientToAmToken().isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasAppResourceUsage()) { if (!getAppResourceUsage().isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasAmRmToken()) { if (!getAmRmToken().isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getAppTimeoutsCount(); i++) { if (!getAppTimeouts(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApplicationId()); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, user_); } if (((bitField0_ & 0x00000004) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, queue_); } if (((bitField0_ & 0x00000008) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, name_); } if (((bitField0_ & 0x00000010) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, host_); } if (((bitField0_ & 0x00000020) != 0)) { output.writeInt32(6, rpcPort_); } if (((bitField0_ & 0x00000040) != 0)) { output.writeMessage(7, getClientToAmToken()); } if (((bitField0_ & 0x00000080) != 0)) { output.writeEnum(8, yarnApplicationState_); } if (((bitField0_ & 0x00000100) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 9, trackingUrl_); } if (((bitField0_ & 0x00000200) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 10, diagnostics_); } if (((bitField0_ & 0x00000400) != 0)) { output.writeInt64(11, startTime_); } if (((bitField0_ & 0x00000800) != 0)) { output.writeInt64(12, finishTime_); } if (((bitField0_ & 0x00001000) != 0)) { output.writeEnum(13, finalApplicationStatus_); } if (((bitField0_ & 0x00002000) != 0)) { output.writeMessage(14, getAppResourceUsage()); } if (((bitField0_ & 0x00004000) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 15, originalTrackingUrl_); } if (((bitField0_ & 0x00008000) != 0)) { output.writeMessage(16, getCurrentApplicationAttemptId()); } if (((bitField0_ & 0x00010000) != 0)) { output.writeFloat(17, progress_); } if (((bitField0_ & 0x00020000) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 18, applicationType_); } if (((bitField0_ & 0x00040000) != 0)) { output.writeMessage(19, getAmRmToken()); } for (int i = 0; i < applicationTags_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 20, applicationTags_.getRaw(i)); } if 
(((bitField0_ & 0x00080000) != 0)) { output.writeEnum(21, logAggregationStatus_); } if (((bitField0_ & 0x00100000) != 0)) { output.writeBool(22, unmanagedApplication_); } if (((bitField0_ & 0x00200000) != 0)) { output.writeMessage(23, getPriority()); } if (((bitField0_ & 0x00400000) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 24, appNodeLabelExpression_); } if (((bitField0_ & 0x00800000) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 25, amNodeLabelExpression_); } for (int i = 0; i < appTimeouts_.size(); i++) { output.writeMessage(26, appTimeouts_.get(i)); } if (((bitField0_ & 0x01000000) != 0)) { output.writeInt64(27, launchTime_); } if (((bitField0_ & 0x02000000) != 0)) { output.writeInt64(28, submitTime_); } if (((bitField0_ & 0x04000000) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 29, rmClusterId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getApplicationId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, user_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, queue_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, name_); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, host_); } if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(6, rpcPort_); } if (((bitField0_ & 0x00000040) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(7, getClientToAmToken()); } if (((bitField0_ & 0x00000080) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(8, yarnApplicationState_); } if (((bitField0_ & 0x00000100) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(9, trackingUrl_); } if (((bitField0_ & 0x00000200) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(10, diagnostics_); } if (((bitField0_ & 0x00000400) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(11, startTime_); } if (((bitField0_ & 0x00000800) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(12, finishTime_); } if (((bitField0_ & 0x00001000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(13, finalApplicationStatus_); } if (((bitField0_ & 0x00002000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(14, getAppResourceUsage()); } if (((bitField0_ & 0x00004000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(15, originalTrackingUrl_); } if (((bitField0_ & 0x00008000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(16, getCurrentApplicationAttemptId()); } if (((bitField0_ & 0x00010000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeFloatSize(17, 
progress_); } if (((bitField0_ & 0x00020000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(18, applicationType_); } if (((bitField0_ & 0x00040000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(19, getAmRmToken()); } { int dataSize = 0; for (int i = 0; i < applicationTags_.size(); i++) { dataSize += computeStringSizeNoTag(applicationTags_.getRaw(i)); } size += dataSize; size += 2 * getApplicationTagsList().size(); } if (((bitField0_ & 0x00080000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(21, logAggregationStatus_); } if (((bitField0_ & 0x00100000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(22, unmanagedApplication_); } if (((bitField0_ & 0x00200000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(23, getPriority()); } if (((bitField0_ & 0x00400000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(24, appNodeLabelExpression_); } if (((bitField0_ & 0x00800000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(25, amNodeLabelExpression_); } for (int i = 0; i < appTimeouts_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(26, appTimeouts_.get(i)); } if (((bitField0_ & 0x01000000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(27, launchTime_); } if (((bitField0_ & 0x02000000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(28, submitTime_); } if (((bitField0_ & 0x04000000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(29, rmClusterId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto) obj; if (hasApplicationId() != other.hasApplicationId()) return false; if (hasApplicationId()) { if (!getApplicationId() .equals(other.getApplicationId())) return false; } if (hasUser() != other.hasUser()) return false; if (hasUser()) { if (!getUser() .equals(other.getUser())) return false; } if (hasQueue() != other.hasQueue()) return false; if (hasQueue()) { if (!getQueue() .equals(other.getQueue())) return false; } if (hasName() != other.hasName()) return false; if (hasName()) { if (!getName() .equals(other.getName())) return false; } if (hasHost() != other.hasHost()) return false; if (hasHost()) { if (!getHost() .equals(other.getHost())) return false; } if (hasRpcPort() != other.hasRpcPort()) return false; if (hasRpcPort()) { if (getRpcPort() != other.getRpcPort()) return false; } if (hasClientToAmToken() != other.hasClientToAmToken()) return false; if (hasClientToAmToken()) { if (!getClientToAmToken() .equals(other.getClientToAmToken())) return false; } if (hasYarnApplicationState() != other.hasYarnApplicationState()) return false; if (hasYarnApplicationState()) { if (yarnApplicationState_ != other.yarnApplicationState_) return false; } if (hasTrackingUrl() != other.hasTrackingUrl()) return false; if 
(hasTrackingUrl()) { if (!getTrackingUrl() .equals(other.getTrackingUrl())) return false; } if (hasDiagnostics() != other.hasDiagnostics()) return false; if (hasDiagnostics()) { if (!getDiagnostics() .equals(other.getDiagnostics())) return false; } if (hasStartTime() != other.hasStartTime()) return false; if (hasStartTime()) { if (getStartTime() != other.getStartTime()) return false; } if (hasFinishTime() != other.hasFinishTime()) return false; if (hasFinishTime()) { if (getFinishTime() != other.getFinishTime()) return false; } if (hasFinalApplicationStatus() != other.hasFinalApplicationStatus()) return false; if (hasFinalApplicationStatus()) { if (finalApplicationStatus_ != other.finalApplicationStatus_) return false; } if (hasAppResourceUsage() != other.hasAppResourceUsage()) return false; if (hasAppResourceUsage()) { if (!getAppResourceUsage() .equals(other.getAppResourceUsage())) return false; } if (hasOriginalTrackingUrl() != other.hasOriginalTrackingUrl()) return false; if (hasOriginalTrackingUrl()) { if (!getOriginalTrackingUrl() .equals(other.getOriginalTrackingUrl())) return false; } if (hasCurrentApplicationAttemptId() != other.hasCurrentApplicationAttemptId()) return false; if (hasCurrentApplicationAttemptId()) { if (!getCurrentApplicationAttemptId() .equals(other.getCurrentApplicationAttemptId())) return false; } if (hasProgress() != other.hasProgress()) return false; if (hasProgress()) { if (java.lang.Float.floatToIntBits(getProgress()) != java.lang.Float.floatToIntBits( other.getProgress())) return false; } if (hasApplicationType() != other.hasApplicationType()) return false; if (hasApplicationType()) { if (!getApplicationType() .equals(other.getApplicationType())) return false; } if (hasAmRmToken() != other.hasAmRmToken()) return false; if (hasAmRmToken()) { if (!getAmRmToken() .equals(other.getAmRmToken())) return false; } if (!getApplicationTagsList() .equals(other.getApplicationTagsList())) return false; if (hasLogAggregationStatus() != other.hasLogAggregationStatus()) return false; if (hasLogAggregationStatus()) { if (logAggregationStatus_ != other.logAggregationStatus_) return false; } if (hasUnmanagedApplication() != other.hasUnmanagedApplication()) return false; if (hasUnmanagedApplication()) { if (getUnmanagedApplication() != other.getUnmanagedApplication()) return false; } if (hasPriority() != other.hasPriority()) return false; if (hasPriority()) { if (!getPriority() .equals(other.getPriority())) return false; } if (hasAppNodeLabelExpression() != other.hasAppNodeLabelExpression()) return false; if (hasAppNodeLabelExpression()) { if (!getAppNodeLabelExpression() .equals(other.getAppNodeLabelExpression())) return false; } if (hasAmNodeLabelExpression() != other.hasAmNodeLabelExpression()) return false; if (hasAmNodeLabelExpression()) { if (!getAmNodeLabelExpression() .equals(other.getAmNodeLabelExpression())) return false; } if (!getAppTimeoutsList() .equals(other.getAppTimeoutsList())) return false; if (hasLaunchTime() != other.hasLaunchTime()) return false; if (hasLaunchTime()) { if (getLaunchTime() != other.getLaunchTime()) return false; } if (hasSubmitTime() != other.hasSubmitTime()) return false; if (hasSubmitTime()) { if (getSubmitTime() != other.getSubmitTime()) return false; } if (hasRmClusterId() != other.hasRmClusterId()) return false; if (hasRmClusterId()) { if (!getRmClusterId() .equals(other.getRmClusterId())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { 
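      // Editorial note: each set field folds its field number (37 * h + N) and its
      // value (53 * h + v) into the hash; the result is memoized in
      // memoizedHashCode, which is safe because instances are immutable.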
if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationId()) { hash = (37 * hash) + APPLICATIONID_FIELD_NUMBER; hash = (53 * hash) + getApplicationId().hashCode(); } if (hasUser()) { hash = (37 * hash) + USER_FIELD_NUMBER; hash = (53 * hash) + getUser().hashCode(); } if (hasQueue()) { hash = (37 * hash) + QUEUE_FIELD_NUMBER; hash = (53 * hash) + getQueue().hashCode(); } if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (hasHost()) { hash = (37 * hash) + HOST_FIELD_NUMBER; hash = (53 * hash) + getHost().hashCode(); } if (hasRpcPort()) { hash = (37 * hash) + RPC_PORT_FIELD_NUMBER; hash = (53 * hash) + getRpcPort(); } if (hasClientToAmToken()) { hash = (37 * hash) + CLIENT_TO_AM_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getClientToAmToken().hashCode(); } if (hasYarnApplicationState()) { hash = (37 * hash) + YARN_APPLICATION_STATE_FIELD_NUMBER; hash = (53 * hash) + yarnApplicationState_; } if (hasTrackingUrl()) { hash = (37 * hash) + TRACKINGURL_FIELD_NUMBER; hash = (53 * hash) + getTrackingUrl().hashCode(); } if (hasDiagnostics()) { hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER; hash = (53 * hash) + getDiagnostics().hashCode(); } if (hasStartTime()) { hash = (37 * hash) + STARTTIME_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getStartTime()); } if (hasFinishTime()) { hash = (37 * hash) + FINISHTIME_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getFinishTime()); } if (hasFinalApplicationStatus()) { hash = (37 * hash) + FINAL_APPLICATION_STATUS_FIELD_NUMBER; hash = (53 * hash) + finalApplicationStatus_; } if (hasAppResourceUsage()) { hash = (37 * hash) + APP_RESOURCE_USAGE_FIELD_NUMBER; hash = (53 * hash) + getAppResourceUsage().hashCode(); } if (hasOriginalTrackingUrl()) { hash = (37 * hash) + ORIGINALTRACKINGURL_FIELD_NUMBER; hash = (53 * hash) + getOriginalTrackingUrl().hashCode(); } if (hasCurrentApplicationAttemptId()) { hash = (37 * hash) + CURRENTAPPLICATIONATTEMPTID_FIELD_NUMBER; hash = (53 * hash) + getCurrentApplicationAttemptId().hashCode(); } if (hasProgress()) { hash = (37 * hash) + PROGRESS_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits( getProgress()); } if (hasApplicationType()) { hash = (37 * hash) + APPLICATIONTYPE_FIELD_NUMBER; hash = (53 * hash) + getApplicationType().hashCode(); } if (hasAmRmToken()) { hash = (37 * hash) + AM_RM_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getAmRmToken().hashCode(); } if (getApplicationTagsCount() > 0) { hash = (37 * hash) + APPLICATIONTAGS_FIELD_NUMBER; hash = (53 * hash) + getApplicationTagsList().hashCode(); } if (hasLogAggregationStatus()) { hash = (37 * hash) + LOG_AGGREGATION_STATUS_FIELD_NUMBER; hash = (53 * hash) + logAggregationStatus_; } if (hasUnmanagedApplication()) { hash = (37 * hash) + UNMANAGED_APPLICATION_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getUnmanagedApplication()); } if (hasPriority()) { hash = (37 * hash) + PRIORITY_FIELD_NUMBER; hash = (53 * hash) + getPriority().hashCode(); } if (hasAppNodeLabelExpression()) { hash = (37 * hash) + APPNODELABELEXPRESSION_FIELD_NUMBER; hash = (53 * hash) + getAppNodeLabelExpression().hashCode(); } if (hasAmNodeLabelExpression()) { hash = (37 * hash) + AMNODELABELEXPRESSION_FIELD_NUMBER; hash = (53 * hash) + getAmNodeLabelExpression().hashCode(); } if (getAppTimeoutsCount() > 0) { hash = 
(37 * hash) + APPTIMEOUTS_FIELD_NUMBER; hash = (53 * hash) + getAppTimeoutsList().hashCode(); } if (hasLaunchTime()) { hash = (37 * hash) + LAUNCHTIME_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getLaunchTime()); } if (hasSubmitTime()) { hash = (37 * hash) + SUBMITTIME_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getSubmitTime()); } if (hasRmClusterId()) { hash = (37 * hash) + RMCLUSTERID_FIELD_NUMBER; hash = (53 * hash) + getRmClusterId().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseDelimitedFrom( java.io.InputStream input, 
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ApplicationReportProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationReportProto) org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationReportProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationReportProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationIdFieldBuilder(); getClientToAmTokenFieldBuilder(); getAppResourceUsageFieldBuilder(); getCurrentApplicationAttemptIdFieldBuilder(); getAmRmTokenFieldBuilder(); getPriorityFieldBuilder(); getAppTimeoutsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; applicationId_ = null; if (applicationIdBuilder_ != null) { applicationIdBuilder_.dispose(); 
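        // Editorial note: clear() resets every field to its proto default
        // (diagnostics_ back to "N/A", enums to their first value) and disposes
        // nested sub-builders so they detach from this builder instance.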
applicationIdBuilder_ = null; } user_ = ""; queue_ = ""; name_ = ""; host_ = ""; rpcPort_ = 0; clientToAmToken_ = null; if (clientToAmTokenBuilder_ != null) { clientToAmTokenBuilder_.dispose(); clientToAmTokenBuilder_ = null; } yarnApplicationState_ = 1; trackingUrl_ = ""; diagnostics_ = "N/A"; startTime_ = 0L; finishTime_ = 0L; finalApplicationStatus_ = 0; appResourceUsage_ = null; if (appResourceUsageBuilder_ != null) { appResourceUsageBuilder_.dispose(); appResourceUsageBuilder_ = null; } originalTrackingUrl_ = ""; currentApplicationAttemptId_ = null; if (currentApplicationAttemptIdBuilder_ != null) { currentApplicationAttemptIdBuilder_.dispose(); currentApplicationAttemptIdBuilder_ = null; } progress_ = 0F; applicationType_ = ""; amRmToken_ = null; if (amRmTokenBuilder_ != null) { amRmTokenBuilder_.dispose(); amRmTokenBuilder_ = null; } applicationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00080000); logAggregationStatus_ = 1; unmanagedApplication_ = false; priority_ = null; if (priorityBuilder_ != null) { priorityBuilder_.dispose(); priorityBuilder_ = null; } appNodeLabelExpression_ = ""; amNodeLabelExpression_ = ""; if (appTimeoutsBuilder_ == null) { appTimeouts_ = java.util.Collections.emptyList(); } else { appTimeouts_ = null; appTimeoutsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x02000000); launchTime_ = 0L; submitTime_ = 0L; rmClusterId_ = ""; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationReportProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto result) { if (((bitField0_ & 0x00080000) != 0)) { applicationTags_ = applicationTags_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00080000); } result.applicationTags_ = applicationTags_; if (appTimeoutsBuilder_ == null) { if (((bitField0_ & 0x02000000) != 0)) { appTimeouts_ = java.util.Collections.unmodifiableList(appTimeouts_); bitField0_ = (bitField0_ & ~0x02000000); } result.appTimeouts_ = appTimeouts_; } else { result.appTimeouts_ = appTimeoutsBuilder_.build(); } } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.applicationId_ = applicationIdBuilder_ == null ? 
applicationId_ : applicationIdBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.user_ = user_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.queue_ = queue_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.name_ = name_; to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.host_ = host_; to_bitField0_ |= 0x00000010; } if (((from_bitField0_ & 0x00000020) != 0)) { result.rpcPort_ = rpcPort_; to_bitField0_ |= 0x00000020; } if (((from_bitField0_ & 0x00000040) != 0)) { result.clientToAmToken_ = clientToAmTokenBuilder_ == null ? clientToAmToken_ : clientToAmTokenBuilder_.build(); to_bitField0_ |= 0x00000040; } if (((from_bitField0_ & 0x00000080) != 0)) { result.yarnApplicationState_ = yarnApplicationState_; to_bitField0_ |= 0x00000080; } if (((from_bitField0_ & 0x00000100) != 0)) { result.trackingUrl_ = trackingUrl_; to_bitField0_ |= 0x00000100; } if (((from_bitField0_ & 0x00000200) != 0)) { result.diagnostics_ = diagnostics_; to_bitField0_ |= 0x00000200; } if (((from_bitField0_ & 0x00000400) != 0)) { result.startTime_ = startTime_; to_bitField0_ |= 0x00000400; } if (((from_bitField0_ & 0x00000800) != 0)) { result.finishTime_ = finishTime_; to_bitField0_ |= 0x00000800; } if (((from_bitField0_ & 0x00001000) != 0)) { result.finalApplicationStatus_ = finalApplicationStatus_; to_bitField0_ |= 0x00001000; } if (((from_bitField0_ & 0x00002000) != 0)) { result.appResourceUsage_ = appResourceUsageBuilder_ == null ? appResourceUsage_ : appResourceUsageBuilder_.build(); to_bitField0_ |= 0x00002000; } if (((from_bitField0_ & 0x00004000) != 0)) { result.originalTrackingUrl_ = originalTrackingUrl_; to_bitField0_ |= 0x00004000; } if (((from_bitField0_ & 0x00008000) != 0)) { result.currentApplicationAttemptId_ = currentApplicationAttemptIdBuilder_ == null ? currentApplicationAttemptId_ : currentApplicationAttemptIdBuilder_.build(); to_bitField0_ |= 0x00008000; } if (((from_bitField0_ & 0x00010000) != 0)) { result.progress_ = progress_; to_bitField0_ |= 0x00010000; } if (((from_bitField0_ & 0x00020000) != 0)) { result.applicationType_ = applicationType_; to_bitField0_ |= 0x00020000; } if (((from_bitField0_ & 0x00040000) != 0)) { result.amRmToken_ = amRmTokenBuilder_ == null ? amRmToken_ : amRmTokenBuilder_.build(); to_bitField0_ |= 0x00040000; } if (((from_bitField0_ & 0x00100000) != 0)) { result.logAggregationStatus_ = logAggregationStatus_; to_bitField0_ |= 0x00080000; } if (((from_bitField0_ & 0x00200000) != 0)) { result.unmanagedApplication_ = unmanagedApplication_; to_bitField0_ |= 0x00100000; } if (((from_bitField0_ & 0x00400000) != 0)) { result.priority_ = priorityBuilder_ == null ? 
priority_ : priorityBuilder_.build(); to_bitField0_ |= 0x00200000; } if (((from_bitField0_ & 0x00800000) != 0)) { result.appNodeLabelExpression_ = appNodeLabelExpression_; to_bitField0_ |= 0x00400000; } if (((from_bitField0_ & 0x01000000) != 0)) { result.amNodeLabelExpression_ = amNodeLabelExpression_; to_bitField0_ |= 0x00800000; } if (((from_bitField0_ & 0x04000000) != 0)) { result.launchTime_ = launchTime_; to_bitField0_ |= 0x01000000; } if (((from_bitField0_ & 0x08000000) != 0)) { result.submitTime_ = submitTime_; to_bitField0_ |= 0x02000000; } if (((from_bitField0_ & 0x10000000) != 0)) { result.rmClusterId_ = rmClusterId_; to_bitField0_ |= 0x04000000; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance()) return this; if (other.hasApplicationId()) { mergeApplicationId(other.getApplicationId()); } if (other.hasUser()) { user_ = other.user_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasQueue()) { queue_ = other.queue_; bitField0_ |= 0x00000004; onChanged(); } if (other.hasName()) { name_ = other.name_; bitField0_ |= 0x00000008; onChanged(); } if (other.hasHost()) { host_ = other.host_; bitField0_ |= 0x00000010; onChanged(); } if (other.hasRpcPort()) { setRpcPort(other.getRpcPort()); } if (other.hasClientToAmToken()) { mergeClientToAmToken(other.getClientToAmToken()); } if (other.hasYarnApplicationState()) { setYarnApplicationState(other.getYarnApplicationState()); } if (other.hasTrackingUrl()) { trackingUrl_ = other.trackingUrl_; bitField0_ |= 0x00000100; onChanged(); } if (other.hasDiagnostics()) { diagnostics_ = other.diagnostics_; bitField0_ |= 0x00000200; onChanged(); } if (other.hasStartTime()) { setStartTime(other.getStartTime()); } if (other.hasFinishTime()) { setFinishTime(other.getFinishTime()); } if (other.hasFinalApplicationStatus()) { setFinalApplicationStatus(other.getFinalApplicationStatus()); } if (other.hasAppResourceUsage()) { mergeAppResourceUsage(other.getAppResourceUsage()); } if (other.hasOriginalTrackingUrl()) { 
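          // Note on mergeFrom(other) semantics, visible throughout this method:
          // optional scalar and string fields are overwritten when set in `other`
          // and left untouched otherwise, while message-typed fields (applicationId,
          // priority, ...) are merged recursively via their merge*() helpers.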
originalTrackingUrl_ = other.originalTrackingUrl_; bitField0_ |= 0x00004000; onChanged(); } if (other.hasCurrentApplicationAttemptId()) { mergeCurrentApplicationAttemptId(other.getCurrentApplicationAttemptId()); } if (other.hasProgress()) { setProgress(other.getProgress()); } if (other.hasApplicationType()) { applicationType_ = other.applicationType_; bitField0_ |= 0x00020000; onChanged(); } if (other.hasAmRmToken()) { mergeAmRmToken(other.getAmRmToken()); } if (!other.applicationTags_.isEmpty()) { if (applicationTags_.isEmpty()) { applicationTags_ = other.applicationTags_; bitField0_ = (bitField0_ & ~0x00080000); } else { ensureApplicationTagsIsMutable(); applicationTags_.addAll(other.applicationTags_); } onChanged(); } if (other.hasLogAggregationStatus()) { setLogAggregationStatus(other.getLogAggregationStatus()); } if (other.hasUnmanagedApplication()) { setUnmanagedApplication(other.getUnmanagedApplication()); } if (other.hasPriority()) { mergePriority(other.getPriority()); } if (other.hasAppNodeLabelExpression()) { appNodeLabelExpression_ = other.appNodeLabelExpression_; bitField0_ |= 0x00800000; onChanged(); } if (other.hasAmNodeLabelExpression()) { amNodeLabelExpression_ = other.amNodeLabelExpression_; bitField0_ |= 0x01000000; onChanged(); } if (appTimeoutsBuilder_ == null) { if (!other.appTimeouts_.isEmpty()) { if (appTimeouts_.isEmpty()) { appTimeouts_ = other.appTimeouts_; bitField0_ = (bitField0_ & ~0x02000000); } else { ensureAppTimeoutsIsMutable(); appTimeouts_.addAll(other.appTimeouts_); } onChanged(); } } else { if (!other.appTimeouts_.isEmpty()) { if (appTimeoutsBuilder_.isEmpty()) { appTimeoutsBuilder_.dispose(); appTimeoutsBuilder_ = null; appTimeouts_ = other.appTimeouts_; bitField0_ = (bitField0_ & ~0x02000000); appTimeoutsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getAppTimeoutsFieldBuilder() : null; } else { appTimeoutsBuilder_.addAllMessages(other.appTimeouts_); } } } if (other.hasLaunchTime()) { setLaunchTime(other.getLaunchTime()); } if (other.hasSubmitTime()) { setSubmitTime(other.getSubmitTime()); } if (other.hasRmClusterId()) { rmClusterId_ = other.rmClusterId_; bitField0_ |= 0x10000000; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasClientToAmToken()) { if (!getClientToAmToken().isInitialized()) { return false; } } if (hasAppResourceUsage()) { if (!getAppResourceUsage().isInitialized()) { return false; } } if (hasAmRmToken()) { if (!getAmRmToken().isInitialized()) { return false; } } for (int i = 0; i < getAppTimeoutsCount(); i++) { if (!getAppTimeouts(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getApplicationIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { user_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { queue_ = input.readBytes(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { name_ = input.readBytes(); bitField0_ |= 0x00000008; break; } // case 34 case 42: { host_ = input.readBytes(); bitField0_ |= 0x00000010; break; } // case 42 case 48: { rpcPort_ = input.readInt32(); bitField0_ |= 0x00000020; break; } // case 48 case 58: { input.readMessage( getClientToAmTokenFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000040; break; } // case 58 case 64: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(8, tmpRaw); } else { yarnApplicationState_ = tmpRaw; bitField0_ |= 0x00000080; } break; } // case 64 case 74: { trackingUrl_ = input.readBytes(); bitField0_ |= 0x00000100; break; } // case 74 case 82: { diagnostics_ = input.readBytes(); bitField0_ |= 0x00000200; break; } // case 82 case 88: { startTime_ = input.readInt64(); bitField0_ |= 0x00000400; break; } // case 88 case 96: { finishTime_ = input.readInt64(); bitField0_ |= 0x00000800; break; } // case 96 case 104: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(13, tmpRaw); } else { finalApplicationStatus_ = tmpRaw; bitField0_ |= 0x00001000; } break; } // case 104 case 114: { input.readMessage( getAppResourceUsageFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00002000; break; } // case 114 case 122: { originalTrackingUrl_ = input.readBytes(); bitField0_ |= 0x00004000; break; } // case 122 case 130: { input.readMessage( getCurrentApplicationAttemptIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00008000; break; } // case 130 case 141: { progress_ = input.readFloat(); bitField0_ |= 0x00010000; break; } 
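            // Note: each case label in this switch is the raw wire tag, computed as
            // (field_number << 3) | wire_type. For example, progress is field 17
            // stored as a 32-bit float (wire type 5): (17 << 3) | 5 = 141, and
            // applicationType is field 18, length-delimited (wire type 2):
            // (18 << 3) | 2 = 146.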
// case 141 case 146: { applicationType_ = input.readBytes(); bitField0_ |= 0x00020000; break; } // case 146 case 154: { input.readMessage( getAmRmTokenFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00040000; break; } // case 154 case 162: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); ensureApplicationTagsIsMutable(); applicationTags_.add(bs); break; } // case 162 case 168: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(21, tmpRaw); } else { logAggregationStatus_ = tmpRaw; bitField0_ |= 0x00100000; } break; } // case 168 case 176: { unmanagedApplication_ = input.readBool(); bitField0_ |= 0x00200000; break; } // case 176 case 186: { input.readMessage( getPriorityFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00400000; break; } // case 186 case 194: { appNodeLabelExpression_ = input.readBytes(); bitField0_ |= 0x00800000; break; } // case 194 case 202: { amNodeLabelExpression_ = input.readBytes(); bitField0_ |= 0x01000000; break; } // case 202 case 210: { org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.PARSER, extensionRegistry); if (appTimeoutsBuilder_ == null) { ensureAppTimeoutsIsMutable(); appTimeouts_.add(m); } else { appTimeoutsBuilder_.addMessage(m); } break; } // case 210 case 216: { launchTime_ = input.readInt64(); bitField0_ |= 0x04000000; break; } // case 216 case 224: { submitTime_ = input.readInt64(); bitField0_ |= 0x08000000; break; } // case 224 case 234: { rmClusterId_ = input.readBytes(); bitField0_ |= 0x10000000; break; } // case 234 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_; /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; * @return Whether the applicationId field is set. */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; * @return The applicationId. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { if (applicationIdBuilder_ == null) { return applicationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } else { return applicationIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder setApplicationId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationId_ = value; } else { applicationIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder setApplicationId( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) { if (applicationIdBuilder_ == null) { applicationId_ = builderForValue.build(); } else { applicationIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder mergeApplicationId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationId_ != null && applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) { getApplicationIdBuilder().mergeFrom(value); } else { applicationId_ = value; } } else { applicationIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public Builder clearApplicationId() { bitField0_ = (bitField0_ & ~0x00000001); applicationId_ = null; if (applicationIdBuilder_ != null) { applicationIdBuilder_.dispose(); applicationIdBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { if (applicationIdBuilder_ != null) { return applicationIdBuilder_.getMessageOrBuilder(); } else { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } } /** * optional .hadoop.yarn.ApplicationIdProto applicationId = 1; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> getApplicationIdFieldBuilder() { if (applicationIdBuilder_ == null) { applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>( getApplicationId(), getParentForChildren(), isClean()); applicationId_ = null; } return applicationIdBuilder_; } private java.lang.Object user_ = ""; /** * optional string user = 2; * @return Whether the user field is set. 
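       * <p>Usage sketch (illustrative caller code, not part of the generated API;
       * the user name is made up):
       * <pre>{@code
       * ApplicationReportProto.Builder b = ApplicationReportProto.newBuilder();
       * if (!b.hasUser()) {
       *   b.setUser("alice");   // records the value and sets the proto2 presence bit
       * }
       * java.lang.String user = b.getUser();
       * }</pre>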
*/ public boolean hasUser() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string user = 2; * @return The user. */ public java.lang.String getUser() { java.lang.Object ref = user_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { user_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string user = 2; * @return The bytes for user. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getUserBytes() { java.lang.Object ref = user_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); user_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string user = 2; * @param value The user to set. * @return This builder for chaining. */ public Builder setUser( java.lang.String value) { if (value == null) { throw new NullPointerException(); } user_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional string user = 2; * @return This builder for chaining. */ public Builder clearUser() { user_ = getDefaultInstance().getUser(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * optional string user = 2; * @param value The bytes for user to set. * @return This builder for chaining. */ public Builder setUserBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } user_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object queue_ = ""; /** * optional string queue = 3; * @return Whether the queue field is set. */ public boolean hasQueue() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string queue = 3; * @return The queue. */ public java.lang.String getQueue() { java.lang.Object ref = queue_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { queue_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string queue = 3; * @return The bytes for queue. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getQueueBytes() { java.lang.Object ref = queue_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); queue_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string queue = 3; * @param value The queue to set. * @return This builder for chaining. */ public Builder setQueue( java.lang.String value) { if (value == null) { throw new NullPointerException(); } queue_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional string queue = 3; * @return This builder for chaining. */ public Builder clearQueue() { queue_ = getDefaultInstance().getQueue(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * optional string queue = 3; * @param value The bytes for queue to set. * @return This builder for chaining. 
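       * <p>Note: unlike {@code setQueue(java.lang.String)}, this raw {@code ByteString}
       * setter performs no UTF-8 validation; the bytes are only decoded (and cached as
       * a {@code String}) on a later {@code getQueue()} call.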
*/ public Builder setQueueBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } queue_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object name_ = ""; /** * optional string name = 4; * @return Whether the name field is set. */ public boolean hasName() { return ((bitField0_ & 0x00000008) != 0); } /** * optional string name = 4; * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string name = 4; * @return The bytes for name. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string name = 4; * @param value The name to set. * @return This builder for chaining. */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional string name = 4; * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * optional string name = 4; * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } private java.lang.Object host_ = ""; /** * optional string host = 5; * @return Whether the host field is set. */ public boolean hasHost() { return ((bitField0_ & 0x00000010) != 0); } /** * optional string host = 5; * @return The host. */ public java.lang.String getHost() { java.lang.Object ref = host_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { host_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string host = 5; * @return The bytes for host. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getHostBytes() { java.lang.Object ref = host_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); host_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string host = 5; * @param value The host to set. * @return This builder for chaining. 
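       * <p>Note: the generated setters in this builder reject {@code null} with a
       * {@code NullPointerException}; to unset a field, use the matching
       * {@code clear*()} method (for example {@code clearHost()}) instead.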
*/ public Builder setHost( java.lang.String value) { if (value == null) { throw new NullPointerException(); } host_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional string host = 5; * @return This builder for chaining. */ public Builder clearHost() { host_ = getDefaultInstance().getHost(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); return this; } /** * optional string host = 5; * @param value The bytes for host to set. * @return This builder for chaining. */ public Builder setHostBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } host_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } private int rpcPort_ ; /** * optional int32 rpc_port = 6; * @return Whether the rpcPort field is set. */ @java.lang.Override public boolean hasRpcPort() { return ((bitField0_ & 0x00000020) != 0); } /** * optional int32 rpc_port = 6; * @return The rpcPort. */ @java.lang.Override public int getRpcPort() { return rpcPort_; } /** * optional int32 rpc_port = 6; * @param value The rpcPort to set. * @return This builder for chaining. */ public Builder setRpcPort(int value) { rpcPort_ = value; bitField0_ |= 0x00000020; onChanged(); return this; } /** * optional int32 rpc_port = 6; * @return This builder for chaining. */ public Builder clearRpcPort() { bitField0_ = (bitField0_ & ~0x00000020); rpcPort_ = 0; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto clientToAmToken_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> clientToAmTokenBuilder_; /** * optional .hadoop.common.TokenProto client_to_am_token = 7; * @return Whether the clientToAmToken field is set. */ public boolean hasClientToAmToken() { return ((bitField0_ & 0x00000040) != 0); } /** * optional .hadoop.common.TokenProto client_to_am_token = 7; * @return The clientToAmToken. */ public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto getClientToAmToken() { if (clientToAmTokenBuilder_ == null) { return clientToAmToken_ == null ? 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : clientToAmToken_; } else { return clientToAmTokenBuilder_.getMessage(); } } /** * optional .hadoop.common.TokenProto client_to_am_token = 7; */ public Builder setClientToAmToken(org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (clientToAmTokenBuilder_ == null) { if (value == null) { throw new NullPointerException(); } clientToAmToken_ = value; } else { clientToAmTokenBuilder_.setMessage(value); } bitField0_ |= 0x00000040; onChanged(); return this; } /** * optional .hadoop.common.TokenProto client_to_am_token = 7; */ public Builder setClientToAmToken( org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) { if (clientToAmTokenBuilder_ == null) { clientToAmToken_ = builderForValue.build(); } else { clientToAmTokenBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000040; onChanged(); return this; } /** * optional .hadoop.common.TokenProto client_to_am_token = 7; */ public Builder mergeClientToAmToken(org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (clientToAmTokenBuilder_ == null) { if (((bitField0_ & 0x00000040) != 0) && clientToAmToken_ != null && clientToAmToken_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) { getClientToAmTokenBuilder().mergeFrom(value); } else { clientToAmToken_ = value; } } else { clientToAmTokenBuilder_.mergeFrom(value); } bitField0_ |= 0x00000040; onChanged(); return this; } /** * optional .hadoop.common.TokenProto client_to_am_token = 7; */ public Builder clearClientToAmToken() { bitField0_ = (bitField0_ & ~0x00000040); clientToAmToken_ = null; if (clientToAmTokenBuilder_ != null) { clientToAmTokenBuilder_.dispose(); clientToAmTokenBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.common.TokenProto client_to_am_token = 7; */ public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getClientToAmTokenBuilder() { bitField0_ |= 0x00000040; onChanged(); return getClientToAmTokenFieldBuilder().getBuilder(); } /** * optional .hadoop.common.TokenProto client_to_am_token = 7; */ public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getClientToAmTokenOrBuilder() { if (clientToAmTokenBuilder_ != null) { return clientToAmTokenBuilder_.getMessageOrBuilder(); } else { return clientToAmToken_ == null ? 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : clientToAmToken_; } } /** * optional .hadoop.common.TokenProto client_to_am_token = 7; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> getClientToAmTokenFieldBuilder() { if (clientToAmTokenBuilder_ == null) { clientToAmTokenBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>( getClientToAmToken(), getParentForChildren(), isClean()); clientToAmToken_ = null; } return clientToAmTokenBuilder_; } private int yarnApplicationState_ = 1; /** * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 8; * @return Whether the yarnApplicationState field is set. */ @java.lang.Override public boolean hasYarnApplicationState() { return ((bitField0_ & 0x00000080) != 0); } /** * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 8; * @return The yarnApplicationState. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getYarnApplicationState() { org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.forNumber(yarnApplicationState_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.NEW : result; } /** * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 8; * @param value The yarnApplicationState to set. * @return This builder for chaining. */ public Builder setYarnApplicationState(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000080; yarnApplicationState_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 8; * @return This builder for chaining. */ public Builder clearYarnApplicationState() { bitField0_ = (bitField0_ & ~0x00000080); yarnApplicationState_ = 1; onChanged(); return this; } private java.lang.Object trackingUrl_ = ""; /** * optional string trackingUrl = 9; * @return Whether the trackingUrl field is set. */ public boolean hasTrackingUrl() { return ((bitField0_ & 0x00000100) != 0); } /** * optional string trackingUrl = 9; * @return The trackingUrl. */ public java.lang.String getTrackingUrl() { java.lang.Object ref = trackingUrl_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { trackingUrl_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string trackingUrl = 9; * @return The bytes for trackingUrl. 
*/ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getTrackingUrlBytes() { java.lang.Object ref = trackingUrl_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); trackingUrl_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string trackingUrl = 9; * @param value The trackingUrl to set. * @return This builder for chaining. */ public Builder setTrackingUrl( java.lang.String value) { if (value == null) { throw new NullPointerException(); } trackingUrl_ = value; bitField0_ |= 0x00000100; onChanged(); return this; } /** * optional string trackingUrl = 9; * @return This builder for chaining. */ public Builder clearTrackingUrl() { trackingUrl_ = getDefaultInstance().getTrackingUrl(); bitField0_ = (bitField0_ & ~0x00000100); onChanged(); return this; } /** * optional string trackingUrl = 9; * @param value The bytes for trackingUrl to set. * @return This builder for chaining. */ public Builder setTrackingUrlBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } trackingUrl_ = value; bitField0_ |= 0x00000100; onChanged(); return this; } private java.lang.Object diagnostics_ = "N/A"; /** * optional string diagnostics = 10 [default = "N/A"]; * @return Whether the diagnostics field is set. */ public boolean hasDiagnostics() { return ((bitField0_ & 0x00000200) != 0); } /** * optional string diagnostics = 10 [default = "N/A"]; * @return The diagnostics. */ public java.lang.String getDiagnostics() { java.lang.Object ref = diagnostics_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { diagnostics_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string diagnostics = 10 [default = "N/A"]; * @return The bytes for diagnostics. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsBytes() { java.lang.Object ref = diagnostics_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); diagnostics_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string diagnostics = 10 [default = "N/A"]; * @param value The diagnostics to set. * @return This builder for chaining. */ public Builder setDiagnostics( java.lang.String value) { if (value == null) { throw new NullPointerException(); } diagnostics_ = value; bitField0_ |= 0x00000200; onChanged(); return this; } /** * optional string diagnostics = 10 [default = "N/A"]; * @return This builder for chaining. */ public Builder clearDiagnostics() { diagnostics_ = getDefaultInstance().getDiagnostics(); bitField0_ = (bitField0_ & ~0x00000200); onChanged(); return this; } /** * optional string diagnostics = 10 [default = "N/A"]; * @param value The bytes for diagnostics to set. * @return This builder for chaining. 
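       * <p>Note: {@code diagnostics} is the one string field here with an explicit
       * proto2 default ({@code "N/A"}), so {@code clearDiagnostics()} restores
       * {@code "N/A"} rather than the empty string used by the other string fields.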
*/ public Builder setDiagnosticsBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } diagnostics_ = value; bitField0_ |= 0x00000200; onChanged(); return this; } private long startTime_ ; /** * optional int64 startTime = 11; * @return Whether the startTime field is set. */ @java.lang.Override public boolean hasStartTime() { return ((bitField0_ & 0x00000400) != 0); } /** * optional int64 startTime = 11; * @return The startTime. */ @java.lang.Override public long getStartTime() { return startTime_; } /** * optional int64 startTime = 11; * @param value The startTime to set. * @return This builder for chaining. */ public Builder setStartTime(long value) { startTime_ = value; bitField0_ |= 0x00000400; onChanged(); return this; } /** * optional int64 startTime = 11; * @return This builder for chaining. */ public Builder clearStartTime() { bitField0_ = (bitField0_ & ~0x00000400); startTime_ = 0L; onChanged(); return this; } private long finishTime_ ; /** * optional int64 finishTime = 12; * @return Whether the finishTime field is set. */ @java.lang.Override public boolean hasFinishTime() { return ((bitField0_ & 0x00000800) != 0); } /** * optional int64 finishTime = 12; * @return The finishTime. */ @java.lang.Override public long getFinishTime() { return finishTime_; } /** * optional int64 finishTime = 12; * @param value The finishTime to set. * @return This builder for chaining. */ public Builder setFinishTime(long value) { finishTime_ = value; bitField0_ |= 0x00000800; onChanged(); return this; } /** * optional int64 finishTime = 12; * @return This builder for chaining. */ public Builder clearFinishTime() { bitField0_ = (bitField0_ & ~0x00000800); finishTime_ = 0L; onChanged(); return this; } private int finalApplicationStatus_ = 0; /** * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 13; * @return Whether the finalApplicationStatus field is set. */ @java.lang.Override public boolean hasFinalApplicationStatus() { return ((bitField0_ & 0x00001000) != 0); } /** * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 13; * @return The finalApplicationStatus. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus() { org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.forNumber(finalApplicationStatus_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.APP_UNDEFINED : result; } /** * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 13; * @param value The finalApplicationStatus to set. * @return This builder for chaining. */ public Builder setFinalApplicationStatus(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00001000; finalApplicationStatus_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 13; * @return This builder for chaining. 
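       * <p>Note: clearing resets the stored raw value to {@code 0}, which maps to
       * {@code FinalApplicationStatusProto.APP_UNDEFINED}; the typed getter also
       * falls back to {@code APP_UNDEFINED} for any unrecognized raw value.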
*/ public Builder clearFinalApplicationStatus() { bitField0_ = (bitField0_ & ~0x00001000); finalApplicationStatus_ = 0; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto appResourceUsage_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProtoOrBuilder> appResourceUsageBuilder_; /** * optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14; * @return Whether the appResourceUsage field is set. */ public boolean hasAppResourceUsage() { return ((bitField0_ & 0x00002000) != 0); } /** * optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14; * @return The appResourceUsage. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto getAppResourceUsage() { if (appResourceUsageBuilder_ == null) { return appResourceUsage_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.getDefaultInstance() : appResourceUsage_; } else { return appResourceUsageBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14; */ public Builder setAppResourceUsage(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto value) { if (appResourceUsageBuilder_ == null) { if (value == null) { throw new NullPointerException(); } appResourceUsage_ = value; } else { appResourceUsageBuilder_.setMessage(value); } bitField0_ |= 0x00002000; onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14; */ public Builder setAppResourceUsage( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.Builder builderForValue) { if (appResourceUsageBuilder_ == null) { appResourceUsage_ = builderForValue.build(); } else { appResourceUsageBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00002000; onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14; */ public Builder mergeAppResourceUsage(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto value) { if (appResourceUsageBuilder_ == null) { if (((bitField0_ & 0x00002000) != 0) && appResourceUsage_ != null && appResourceUsage_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.getDefaultInstance()) { getAppResourceUsageBuilder().mergeFrom(value); } else { appResourceUsage_ = value; } } else { appResourceUsageBuilder_.mergeFrom(value); } bitField0_ |= 0x00002000; onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14; */ public Builder clearAppResourceUsage() { bitField0_ = (bitField0_ & ~0x00002000); appResourceUsage_ = null; if (appResourceUsageBuilder_ != null) { appResourceUsageBuilder_.dispose(); appResourceUsageBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.Builder getAppResourceUsageBuilder() { bitField0_ |= 
0x00002000; onChanged(); return getAppResourceUsageFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProtoOrBuilder getAppResourceUsageOrBuilder() { if (appResourceUsageBuilder_ != null) { return appResourceUsageBuilder_.getMessageOrBuilder(); } else { return appResourceUsage_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.getDefaultInstance() : appResourceUsage_; } } /** * optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProtoOrBuilder> getAppResourceUsageFieldBuilder() { if (appResourceUsageBuilder_ == null) { appResourceUsageBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProtoOrBuilder>( getAppResourceUsage(), getParentForChildren(), isClean()); appResourceUsage_ = null; } return appResourceUsageBuilder_; } private java.lang.Object originalTrackingUrl_ = ""; /** * optional string originalTrackingUrl = 15; * @return Whether the originalTrackingUrl field is set. */ public boolean hasOriginalTrackingUrl() { return ((bitField0_ & 0x00004000) != 0); } /** * optional string originalTrackingUrl = 15; * @return The originalTrackingUrl. */ public java.lang.String getOriginalTrackingUrl() { java.lang.Object ref = originalTrackingUrl_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { originalTrackingUrl_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string originalTrackingUrl = 15; * @return The bytes for originalTrackingUrl. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getOriginalTrackingUrlBytes() { java.lang.Object ref = originalTrackingUrl_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); originalTrackingUrl_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string originalTrackingUrl = 15; * @param value The originalTrackingUrl to set. * @return This builder for chaining. */ public Builder setOriginalTrackingUrl( java.lang.String value) { if (value == null) { throw new NullPointerException(); } originalTrackingUrl_ = value; bitField0_ |= 0x00004000; onChanged(); return this; } /** * optional string originalTrackingUrl = 15; * @return This builder for chaining. 
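       * <p>Note: the clear methods reset the stored value for readability, but it is
       * dropping the field's presence bit in {@code bitField0_} that makes the
       * corresponding {@code has*()} accessor report {@code false} again.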
*/ public Builder clearOriginalTrackingUrl() { originalTrackingUrl_ = getDefaultInstance().getOriginalTrackingUrl(); bitField0_ = (bitField0_ & ~0x00004000); onChanged(); return this; } /** * optional string originalTrackingUrl = 15; * @param value The bytes for originalTrackingUrl to set. * @return This builder for chaining. */ public Builder setOriginalTrackingUrlBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } originalTrackingUrl_ = value; bitField0_ |= 0x00004000; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto currentApplicationAttemptId_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> currentApplicationAttemptIdBuilder_; /** * optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16; * @return Whether the currentApplicationAttemptId field is set. */ public boolean hasCurrentApplicationAttemptId() { return ((bitField0_ & 0x00008000) != 0); } /** * optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16; * @return The currentApplicationAttemptId. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getCurrentApplicationAttemptId() { if (currentApplicationAttemptIdBuilder_ == null) { return currentApplicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : currentApplicationAttemptId_; } else { return currentApplicationAttemptIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16; */ public Builder setCurrentApplicationAttemptId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) { if (currentApplicationAttemptIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } currentApplicationAttemptId_ = value; } else { currentApplicationAttemptIdBuilder_.setMessage(value); } bitField0_ |= 0x00008000; onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16; */ public Builder setCurrentApplicationAttemptId( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) { if (currentApplicationAttemptIdBuilder_ == null) { currentApplicationAttemptId_ = builderForValue.build(); } else { currentApplicationAttemptIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00008000; onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16; */ public Builder mergeCurrentApplicationAttemptId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) { if (currentApplicationAttemptIdBuilder_ == null) { if (((bitField0_ & 0x00008000) != 0) && currentApplicationAttemptId_ != null && currentApplicationAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) { getCurrentApplicationAttemptIdBuilder().mergeFrom(value); } else { currentApplicationAttemptId_ = value; } } else { currentApplicationAttemptIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00008000; onChanged(); return this; } /** * 
optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16; */ public Builder clearCurrentApplicationAttemptId() { bitField0_ = (bitField0_ & ~0x00008000); currentApplicationAttemptId_ = null; if (currentApplicationAttemptIdBuilder_ != null) { currentApplicationAttemptIdBuilder_.dispose(); currentApplicationAttemptIdBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getCurrentApplicationAttemptIdBuilder() { bitField0_ |= 0x00008000; onChanged(); return getCurrentApplicationAttemptIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getCurrentApplicationAttemptIdOrBuilder() { if (currentApplicationAttemptIdBuilder_ != null) { return currentApplicationAttemptIdBuilder_.getMessageOrBuilder(); } else { return currentApplicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : currentApplicationAttemptId_; } } /** * optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> getCurrentApplicationAttemptIdFieldBuilder() { if (currentApplicationAttemptIdBuilder_ == null) { currentApplicationAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>( getCurrentApplicationAttemptId(), getParentForChildren(), isClean()); currentApplicationAttemptId_ = null; } return currentApplicationAttemptIdBuilder_; } private float progress_ ; /** * optional float progress = 17; * @return Whether the progress field is set. */ @java.lang.Override public boolean hasProgress() { return ((bitField0_ & 0x00010000) != 0); } /** * optional float progress = 17; * @return The progress. */ @java.lang.Override public float getProgress() { return progress_; } /** * optional float progress = 17; * @param value The progress to set. * @return This builder for chaining. */ public Builder setProgress(float value) { progress_ = value; bitField0_ |= 0x00010000; onChanged(); return this; } /** * optional float progress = 17; * @return This builder for chaining. */ public Builder clearProgress() { bitField0_ = (bitField0_ & ~0x00010000); progress_ = 0F; onChanged(); return this; } private java.lang.Object applicationType_ = ""; /** * optional string applicationType = 18; * @return Whether the applicationType field is set. */ public boolean hasApplicationType() { return ((bitField0_ & 0x00020000) != 0); } /** * optional string applicationType = 18; * @return The applicationType. 
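       * <p>Note: string fields are stored as {@code java.lang.Object} so they can hold
       * either a decoded {@code String} or the raw {@code ByteString} off the wire;
       * this getter decodes on first access and caches the {@code String} back into
       * the field only when the bytes are valid UTF-8.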
*/ public java.lang.String getApplicationType() { java.lang.Object ref = applicationType_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { applicationType_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string applicationType = 18; * @return The bytes for applicationType. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationTypeBytes() { java.lang.Object ref = applicationType_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); applicationType_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string applicationType = 18; * @param value The applicationType to set. * @return This builder for chaining. */ public Builder setApplicationType( java.lang.String value) { if (value == null) { throw new NullPointerException(); } applicationType_ = value; bitField0_ |= 0x00020000; onChanged(); return this; } /** * optional string applicationType = 18; * @return This builder for chaining. */ public Builder clearApplicationType() { applicationType_ = getDefaultInstance().getApplicationType(); bitField0_ = (bitField0_ & ~0x00020000); onChanged(); return this; } /** * optional string applicationType = 18; * @param value The bytes for applicationType to set. * @return This builder for chaining. */ public Builder setApplicationTypeBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } applicationType_ = value; bitField0_ |= 0x00020000; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto amRmToken_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> amRmTokenBuilder_; /** * optional .hadoop.common.TokenProto am_rm_token = 19; * @return Whether the amRmToken field is set. */ public boolean hasAmRmToken() { return ((bitField0_ & 0x00040000) != 0); } /** * optional .hadoop.common.TokenProto am_rm_token = 19; * @return The amRmToken. */ public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto getAmRmToken() { if (amRmTokenBuilder_ == null) { return amRmToken_ == null ? 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : amRmToken_; } else { return amRmTokenBuilder_.getMessage(); } } /** * optional .hadoop.common.TokenProto am_rm_token = 19; */ public Builder setAmRmToken(org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (amRmTokenBuilder_ == null) { if (value == null) { throw new NullPointerException(); } amRmToken_ = value; } else { amRmTokenBuilder_.setMessage(value); } bitField0_ |= 0x00040000; onChanged(); return this; } /** * optional .hadoop.common.TokenProto am_rm_token = 19; */ public Builder setAmRmToken( org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) { if (amRmTokenBuilder_ == null) { amRmToken_ = builderForValue.build(); } else { amRmTokenBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00040000; onChanged(); return this; } /** * optional .hadoop.common.TokenProto am_rm_token = 19; */ public Builder mergeAmRmToken(org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (amRmTokenBuilder_ == null) { if (((bitField0_ & 0x00040000) != 0) && amRmToken_ != null && amRmToken_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) { getAmRmTokenBuilder().mergeFrom(value); } else { amRmToken_ = value; } } else { amRmTokenBuilder_.mergeFrom(value); } bitField0_ |= 0x00040000; onChanged(); return this; } /** * optional .hadoop.common.TokenProto am_rm_token = 19; */ public Builder clearAmRmToken() { bitField0_ = (bitField0_ & ~0x00040000); amRmToken_ = null; if (amRmTokenBuilder_ != null) { amRmTokenBuilder_.dispose(); amRmTokenBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.common.TokenProto am_rm_token = 19; */ public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getAmRmTokenBuilder() { bitField0_ |= 0x00040000; onChanged(); return getAmRmTokenFieldBuilder().getBuilder(); } /** * optional .hadoop.common.TokenProto am_rm_token = 19; */ public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getAmRmTokenOrBuilder() { if (amRmTokenBuilder_ != null) { return amRmTokenBuilder_.getMessageOrBuilder(); } else { return amRmToken_ == null ? 
      /**
       * optional .hadoop.common.TokenProto am_rm_token = 19;
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getAmRmTokenOrBuilder() {
        if (amRmTokenBuilder_ != null) {
          return amRmTokenBuilder_.getMessageOrBuilder();
        } else {
          return amRmToken_ == null
              ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()
              : amRmToken_;
        }
      }
      /**
       * optional .hadoop.common.TokenProto am_rm_token = 19;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto,
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder,
          org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>
          getAmRmTokenFieldBuilder() {
        if (amRmTokenBuilder_ == null) {
          amRmTokenBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto,
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder,
              org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>(
                  getAmRmToken(),
                  getParentForChildren(),
                  isClean());
          amRmToken_ = null;
        }
        return amRmTokenBuilder_;
      }
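      /*
       * Editor's note: sketch of the singular-message-field pattern completed
       * above (assumes a TokenProto built elsewhere; TokenProto belongs to
       * SecurityProtos, not this file):
       *
       *   builder.setAmRmToken(token);     // stores the message, sets bit 0x00040000
       *   builder.getAmRmTokenBuilder();   // lazily switches to a SingleFieldBuilderV3
       *   builder.clearAmRmToken();        // clears the bit and disposes the nested builder
       *
       * Once getAmRmTokenFieldBuilder() has run, amRmToken_ is nulled and every
       * read or write goes through amRmTokenBuilder_ instead of the plain field.
       */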
      private org.apache.hadoop.thirdparty.protobuf.LazyStringList applicationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
      private void ensureApplicationTagsIsMutable() {
        if (!((bitField0_ & 0x00080000) != 0)) {
          applicationTags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(applicationTags_);
          bitField0_ |= 0x00080000;
        }
      }
      /**
       * repeated string applicationTags = 20;
       * @return A list containing the applicationTags.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getApplicationTagsList() {
        return applicationTags_.getUnmodifiableView();
      }
      /**
       * repeated string applicationTags = 20;
       * @return The count of applicationTags.
       */
      public int getApplicationTagsCount() {
        return applicationTags_.size();
      }
      /**
       * repeated string applicationTags = 20;
       * @param index The index of the element to return.
       * @return The applicationTags at the given index.
       */
      public java.lang.String getApplicationTags(int index) {
        return applicationTags_.get(index);
      }
      /**
       * repeated string applicationTags = 20;
       * @param index The index of the value to return.
       * @return The bytes of the applicationTags at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getApplicationTagsBytes(int index) {
        return applicationTags_.getByteString(index);
      }
      /**
       * repeated string applicationTags = 20;
       * @param index The index to set the value at.
       * @param value The applicationTags to set.
       * @return This builder for chaining.
       */
      public Builder setApplicationTags(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureApplicationTagsIsMutable();
        applicationTags_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * repeated string applicationTags = 20;
       * @param value The applicationTags to add.
       * @return This builder for chaining.
       */
      public Builder addApplicationTags(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureApplicationTagsIsMutable();
        applicationTags_.add(value);
        onChanged();
        return this;
      }
      /**
       * repeated string applicationTags = 20;
       * @param values The applicationTags to add.
       * @return This builder for chaining.
       */
      public Builder addAllApplicationTags(
          java.lang.Iterable<java.lang.String> values) {
        ensureApplicationTagsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, applicationTags_);
        onChanged();
        return this;
      }
      /**
       * repeated string applicationTags = 20;
       * @return This builder for chaining.
       */
      public Builder clearApplicationTags() {
        applicationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00080000);
        onChanged();
        return this;
      }
      /**
       * repeated string applicationTags = 20;
       * @param value The bytes of the applicationTags to add.
       * @return This builder for chaining.
       */
      public Builder addApplicationTagsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureApplicationTagsIsMutable();
        applicationTags_.add(value);
        onChanged();
        return this;
      }
      private int logAggregationStatus_ = 1;
      /**
       * optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 21;
       * @return Whether the logAggregationStatus field is set.
       */
      @java.lang.Override
      public boolean hasLogAggregationStatus() {
        return ((bitField0_ & 0x00100000) != 0);
      }
      /**
       * optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 21;
       * @return The logAggregationStatus.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto getLogAggregationStatus() {
        org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto result =
            org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto.forNumber(logAggregationStatus_);
        return result == null
            ? org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto.LOG_DISABLED
            : result;
      }
      /**
       * optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 21;
       * @param value The logAggregationStatus to set.
       * @return This builder for chaining.
       */
      public Builder setLogAggregationStatus(org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto value) {
        if (value == null) { throw new NullPointerException(); }
        bitField0_ |= 0x00100000;
        logAggregationStatus_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 21;
       * @return This builder for chaining.
       */
      public Builder clearLogAggregationStatus() {
        bitField0_ = (bitField0_ & ~0x00100000);
        logAggregationStatus_ = 1;
        onChanged();
        return this;
      }
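      /*
       * Editor's note: the tag list above is a copy-on-first-write
       * LazyStringArrayList; ensureApplicationTagsIsMutable() copies it at most
       * once per builder generation. Enum fields such as log_aggregation_status
       * are stored as raw ints. A sketch using only methods defined above:
       *
       *   builder.addApplicationTags("etl").addApplicationTags("nightly");
       *   builder.setLogAggregationStatus(LogAggregationStatusProto.LOG_DISABLED);
       *   // getLogAggregationStatus() falls back to LOG_DISABLED when the stored
       *   // number has no enum constant (forNumber returned null).
       */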
      private boolean unmanagedApplication_ ;
      /**
       * optional bool unmanaged_application = 22 [default = false];
       * @return Whether the unmanagedApplication field is set.
       */
      @java.lang.Override
      public boolean hasUnmanagedApplication() {
        return ((bitField0_ & 0x00200000) != 0);
      }
      /**
       * optional bool unmanaged_application = 22 [default = false];
       * @return The unmanagedApplication.
       */
      @java.lang.Override
      public boolean getUnmanagedApplication() {
        return unmanagedApplication_;
      }
      /**
       * optional bool unmanaged_application = 22 [default = false];
       * @param value The unmanagedApplication to set.
       * @return This builder for chaining.
       */
      public Builder setUnmanagedApplication(boolean value) {
        unmanagedApplication_ = value;
        bitField0_ |= 0x00200000;
        onChanged();
        return this;
      }
      /**
       * optional bool unmanaged_application = 22 [default = false];
       * @return This builder for chaining.
       */
      public Builder clearUnmanagedApplication() {
        bitField0_ = (bitField0_ & ~0x00200000);
        unmanagedApplication_ = false;
        onChanged();
        return this;
      }
      private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto,
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder,
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_;
      /**
       * optional .hadoop.yarn.PriorityProto priority = 23;
       * @return Whether the priority field is set.
       */
      public boolean hasPriority() {
        return ((bitField0_ & 0x00400000) != 0);
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 23;
       * @return The priority.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
        if (priorityBuilder_ == null) {
          return priority_ == null
              ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()
              : priority_;
        } else {
          return priorityBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 23;
       */
      public Builder setPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (value == null) { throw new NullPointerException(); }
          priority_ = value;
        } else {
          priorityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00400000;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 23;
       */
      public Builder setPriority(
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) {
        if (priorityBuilder_ == null) {
          priority_ = builderForValue.build();
        } else {
          priorityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00400000;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 23;
       */
      public Builder mergePriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (((bitField0_ & 0x00400000) != 0) &&
              priority_ != null &&
              priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) {
            getPriorityBuilder().mergeFrom(value);
          } else {
            priority_ = value;
          }
        } else {
          priorityBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00400000;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 23;
       */
      public Builder clearPriority() {
        bitField0_ = (bitField0_ & ~0x00400000);
        priority_ = null;
        if (priorityBuilder_ != null) {
          priorityBuilder_.dispose();
          priorityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 23;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() {
        bitField0_ |= 0x00400000;
        onChanged();
        return getPriorityFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 23;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
        if (priorityBuilder_ != null) {
          return priorityBuilder_.getMessageOrBuilder();
        } else {
          return priority_ == null
              ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()
              : priority_;
        }
      }
      /**
       * optional .hadoop.yarn.PriorityProto priority = 23;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto,
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder,
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>
          getPriorityFieldBuilder() {
        if (priorityBuilder_ == null) {
          priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto,
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder,
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>(
                  getPriority(),
                  getParentForChildren(),
                  isClean());
          priority_ = null;
        }
        return priorityBuilder_;
      }
      private java.lang.Object appNodeLabelExpression_ = "";
      /**
       * optional string appNodeLabelExpression = 24;
       * @return Whether the appNodeLabelExpression field is set.
       */
      public boolean hasAppNodeLabelExpression() {
        return ((bitField0_ & 0x00800000) != 0);
      }
      /**
       * optional string appNodeLabelExpression = 24;
       * @return The appNodeLabelExpression.
       */
      public java.lang.String getAppNodeLabelExpression() {
        java.lang.Object ref = appNodeLabelExpression_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            appNodeLabelExpression_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string appNodeLabelExpression = 24;
       * @return The bytes for appNodeLabelExpression.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getAppNodeLabelExpressionBytes() {
        java.lang.Object ref = appNodeLabelExpression_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b =
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          appNodeLabelExpression_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string appNodeLabelExpression = 24;
       * @param value The appNodeLabelExpression to set.
       * @return This builder for chaining.
       */
      public Builder setAppNodeLabelExpression(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        appNodeLabelExpression_ = value;
        bitField0_ |= 0x00800000;
        onChanged();
        return this;
      }
      /**
       * optional string appNodeLabelExpression = 24;
       * @return This builder for chaining.
       */
      public Builder clearAppNodeLabelExpression() {
        appNodeLabelExpression_ = getDefaultInstance().getAppNodeLabelExpression();
        bitField0_ = (bitField0_ & ~0x00800000);
        onChanged();
        return this;
      }
      /**
       * optional string appNodeLabelExpression = 24;
       * @param value The bytes for appNodeLabelExpression to set.
       * @return This builder for chaining.
       */
      public Builder setAppNodeLabelExpressionBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        appNodeLabelExpression_ = value;
        bitField0_ |= 0x00800000;
        onChanged();
        return this;
      }
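      /*
       * Editor's note: mergePriority() (above) only merges field-by-field when
       * priority is already set to a non-default message; otherwise it simply
       * replaces priority_. A hedged sketch, assuming PriorityProto carries the
       * usual int32 "priority" field and setter (defined elsewhere in this file):
       *
       *   builder.setPriority(PriorityProto.newBuilder().setPriority(1).build());
       *   builder.mergePriority(other.getPriority()); // merges into the existing value
       */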
      private java.lang.Object amNodeLabelExpression_ = "";
      /**
       * optional string amNodeLabelExpression = 25;
       * @return Whether the amNodeLabelExpression field is set.
       */
      public boolean hasAmNodeLabelExpression() {
        return ((bitField0_ & 0x01000000) != 0);
      }
      /**
       * optional string amNodeLabelExpression = 25;
       * @return The amNodeLabelExpression.
       */
      public java.lang.String getAmNodeLabelExpression() {
        java.lang.Object ref = amNodeLabelExpression_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            amNodeLabelExpression_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string amNodeLabelExpression = 25;
       * @return The bytes for amNodeLabelExpression.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getAmNodeLabelExpressionBytes() {
        java.lang.Object ref = amNodeLabelExpression_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b =
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          amNodeLabelExpression_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string amNodeLabelExpression = 25;
       * @param value The amNodeLabelExpression to set.
       * @return This builder for chaining.
       */
      public Builder setAmNodeLabelExpression(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        amNodeLabelExpression_ = value;
        bitField0_ |= 0x01000000;
        onChanged();
        return this;
      }
      /**
       * optional string amNodeLabelExpression = 25;
       * @return This builder for chaining.
       */
      public Builder clearAmNodeLabelExpression() {
        amNodeLabelExpression_ = getDefaultInstance().getAmNodeLabelExpression();
        bitField0_ = (bitField0_ & ~0x01000000);
        onChanged();
        return this;
      }
      /**
       * optional string amNodeLabelExpression = 25;
       * @param value The bytes for amNodeLabelExpression to set.
       * @return This builder for chaining.
       */
      public Builder setAmNodeLabelExpressionBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        amNodeLabelExpression_ = value;
        bitField0_ |= 0x01000000;
        onChanged();
        return this;
      }
      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto> appTimeouts_ =
          java.util.Collections.emptyList();
      private void ensureAppTimeoutsIsMutable() {
        if (!((bitField0_ & 0x02000000) != 0)) {
          appTimeouts_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto>(appTimeouts_);
          bitField0_ |= 0x02000000;
        }
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto,
          org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder,
          org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder> appTimeoutsBuilder_;
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto> getAppTimeoutsList() {
        if (appTimeoutsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(appTimeouts_);
        } else {
          return appTimeoutsBuilder_.getMessageList();
        }
      }
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public int getAppTimeoutsCount() {
        if (appTimeoutsBuilder_ == null) {
          return appTimeouts_.size();
        } else {
          return appTimeoutsBuilder_.getCount();
        }
      }
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto getAppTimeouts(int index) {
        if (appTimeoutsBuilder_ == null) {
          return appTimeouts_.get(index);
        } else {
          return appTimeoutsBuilder_.getMessage(index);
        }
      }
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public Builder setAppTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto value) {
        if (appTimeoutsBuilder_ == null) {
          if (value == null) { throw new NullPointerException(); }
          ensureAppTimeoutsIsMutable();
          appTimeouts_.set(index, value);
          onChanged();
        } else {
          appTimeoutsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public Builder setAppTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder builderForValue) {
        if (appTimeoutsBuilder_ == null) {
          ensureAppTimeoutsIsMutable();
          appTimeouts_.set(index, builderForValue.build());
          onChanged();
        } else {
          appTimeoutsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public Builder addAppTimeouts(org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto value) {
        if (appTimeoutsBuilder_ == null) {
          if (value == null) { throw new NullPointerException(); }
          ensureAppTimeoutsIsMutable();
          appTimeouts_.add(value);
          onChanged();
        } else {
          appTimeoutsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public Builder addAppTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto value) {
        if (appTimeoutsBuilder_ == null) {
          if (value == null) { throw new NullPointerException(); }
          ensureAppTimeoutsIsMutable();
          appTimeouts_.add(index, value);
          onChanged();
        } else {
          appTimeoutsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public Builder addAppTimeouts(
          org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder builderForValue) {
        if (appTimeoutsBuilder_ == null) {
          ensureAppTimeoutsIsMutable();
          appTimeouts_.add(builderForValue.build());
          onChanged();
        } else {
          appTimeoutsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public Builder addAppTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder builderForValue) {
        if (appTimeoutsBuilder_ == null) {
          ensureAppTimeoutsIsMutable();
          appTimeouts_.add(index, builderForValue.build());
          onChanged();
        } else {
          appTimeoutsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public Builder addAllAppTimeouts(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto> values) {
        if (appTimeoutsBuilder_ == null) {
          ensureAppTimeoutsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, appTimeouts_);
          onChanged();
        } else {
          appTimeoutsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public Builder clearAppTimeouts() {
        if (appTimeoutsBuilder_ == null) {
          appTimeouts_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x02000000);
          onChanged();
        } else {
          appTimeoutsBuilder_.clear();
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public Builder removeAppTimeouts(int index) {
        if (appTimeoutsBuilder_ == null) {
          ensureAppTimeoutsIsMutable();
          appTimeouts_.remove(index);
          onChanged();
        } else {
          appTimeoutsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder getAppTimeoutsBuilder(
          int index) {
        return getAppTimeoutsFieldBuilder().getBuilder(index);
      }
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder getAppTimeoutsOrBuilder(
          int index) {
        if (appTimeoutsBuilder_ == null) {
          return appTimeouts_.get(index);
        } else {
          return appTimeoutsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder>
          getAppTimeoutsOrBuilderList() {
        if (appTimeoutsBuilder_ != null) {
          return appTimeoutsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(appTimeouts_);
        }
      }
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder addAppTimeoutsBuilder() {
        return getAppTimeoutsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder addAppTimeoutsBuilder(
          int index) {
        return getAppTimeoutsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.getDefaultInstance());
      }
      /**
       * repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder>
          getAppTimeoutsBuilderList() {
        return getAppTimeoutsFieldBuilder().getBuilderList();
      }
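      /*
       * Editor's note: repeated message fields have two storage modes. Before
       * getAppTimeoutsFieldBuilder() is first used they live in the plain
       * appTimeouts_ list; afterwards every mutator delegates to the
       * RepeatedFieldBuilderV3. A sketch using only methods defined above:
       *
       *   builder.addAppTimeoutsBuilder()
       *          .setApplicationTimeoutType(
       *              ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME);
       *   int n = builder.getAppTimeoutsCount(); // 1
       */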
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto,
          org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder,
          org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder>
          getAppTimeoutsFieldBuilder() {
        if (appTimeoutsBuilder_ == null) {
          appTimeoutsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto,
              org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder,
              org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder>(
                  appTimeouts_,
                  ((bitField0_ & 0x02000000) != 0),
                  getParentForChildren(),
                  isClean());
          appTimeouts_ = null;
        }
        return appTimeoutsBuilder_;
      }
      private long launchTime_ ;
      /**
       * optional int64 launchTime = 27;
       * @return Whether the launchTime field is set.
       */
      @java.lang.Override
      public boolean hasLaunchTime() {
        return ((bitField0_ & 0x04000000) != 0);
      }
      /**
       * optional int64 launchTime = 27;
       * @return The launchTime.
       */
      @java.lang.Override
      public long getLaunchTime() {
        return launchTime_;
      }
      /**
       * optional int64 launchTime = 27;
       * @param value The launchTime to set.
       * @return This builder for chaining.
       */
      public Builder setLaunchTime(long value) {
        launchTime_ = value;
        bitField0_ |= 0x04000000;
        onChanged();
        return this;
      }
      /**
       * optional int64 launchTime = 27;
       * @return This builder for chaining.
       */
      public Builder clearLaunchTime() {
        bitField0_ = (bitField0_ & ~0x04000000);
        launchTime_ = 0L;
        onChanged();
        return this;
      }
      private long submitTime_ ;
      /**
       * optional int64 submitTime = 28;
       * @return Whether the submitTime field is set.
       */
      @java.lang.Override
      public boolean hasSubmitTime() {
        return ((bitField0_ & 0x08000000) != 0);
      }
      /**
       * optional int64 submitTime = 28;
       * @return The submitTime.
       */
      @java.lang.Override
      public long getSubmitTime() {
        return submitTime_;
      }
      /**
       * optional int64 submitTime = 28;
       * @param value The submitTime to set.
       * @return This builder for chaining.
       */
      public Builder setSubmitTime(long value) {
        submitTime_ = value;
        bitField0_ |= 0x08000000;
        onChanged();
        return this;
      }
      /**
       * optional int64 submitTime = 28;
       * @return This builder for chaining.
       */
      public Builder clearSubmitTime() {
        bitField0_ = (bitField0_ & ~0x08000000);
        submitTime_ = 0L;
        onChanged();
        return this;
      }
      private java.lang.Object rmClusterId_ = "";
      /**
       * optional string rmClusterId = 29;
       * @return Whether the rmClusterId field is set.
       */
      public boolean hasRmClusterId() {
        return ((bitField0_ & 0x10000000) != 0);
      }
      /**
       * optional string rmClusterId = 29;
       * @return The rmClusterId.
       */
      public java.lang.String getRmClusterId() {
        java.lang.Object ref = rmClusterId_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            rmClusterId_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string rmClusterId = 29;
       * @return The bytes for rmClusterId.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getRmClusterIdBytes() {
        java.lang.Object ref = rmClusterId_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b =
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          rmClusterId_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
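      /*
       * Editor's note: launchTime and submitTime are plain int64s; YARN callers
       * conventionally populate them with epoch milliseconds (a convention of
       * the callers, not something this message enforces):
       *
       *   builder.setSubmitTime(System.currentTimeMillis());
       *   builder.setLaunchTime(System.currentTimeMillis());
       *   builder.clearLaunchTime(); // resets to 0L and clears bit 0x04000000
       */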
      /**
       * optional string rmClusterId = 29;
       * @param value The rmClusterId to set.
       * @return This builder for chaining.
       */
      public Builder setRmClusterId(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        rmClusterId_ = value;
        bitField0_ |= 0x10000000;
        onChanged();
        return this;
      }
      /**
       * optional string rmClusterId = 29;
       * @return This builder for chaining.
       */
      public Builder clearRmClusterId() {
        rmClusterId_ = getDefaultInstance().getRmClusterId();
        bitField0_ = (bitField0_ & ~0x10000000);
        onChanged();
        return this;
      }
      /**
       * optional string rmClusterId = 29;
       * @param value The bytes for rmClusterId to set.
       * @return This builder for chaining.
       */
      public Builder setRmClusterIdBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        rmClusterId_ = value;
        bitField0_ |= 0x10000000;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationReportProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationReportProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationReportProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationReportProto>() {
      @java.lang.Override
      public ApplicationReportProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationReportProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationReportProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
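  /*
   * Editor's note: a round-trip sketch for the parser/default-instance plumbing
   * that closes ApplicationReportProto above; `report` stands in for any built
   * instance, and toByteArray()/parseFrom(byte[]) are the standard MessageLite
   * entry points rather than anything specific to this file:
   *
   *   byte[] bytes = report.toByteArray();
   *   YarnProtos.ApplicationReportProto copy =
   *       YarnProtos.ApplicationReportProto.parseFrom(bytes); // routed through PARSER
   *   assert copy.equals(report);
   */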
  public interface AppTimeoutsMapProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.AppTimeoutsMapProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;
     * @return Whether the applicationTimeoutType field is set.
     */
    boolean hasApplicationTimeoutType();
    /**
     * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;
     * @return The applicationTimeoutType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType();

    /**
     * optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;
     * @return Whether the applicationTimeout field is set.
     */
    boolean hasApplicationTimeout();
    /**
     * optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;
     * @return The applicationTimeout.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto getApplicationTimeout();
    /**
     * optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProtoOrBuilder getApplicationTimeoutOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.AppTimeoutsMapProto}
   */
  public static final class AppTimeoutsMapProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.AppTimeoutsMapProto)
      AppTimeoutsMapProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use AppTimeoutsMapProto.newBuilder() to construct.
    private AppTimeoutsMapProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private AppTimeoutsMapProto() {
      applicationTimeoutType_ = 1;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new AppTimeoutsMapProto();
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AppTimeoutsMapProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AppTimeoutsMapProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_TIMEOUT_TYPE_FIELD_NUMBER = 1;
    private int applicationTimeoutType_ = 1;
    /**
     * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;
     * @return Whether the applicationTimeoutType field is set.
     */
    @java.lang.Override public boolean hasApplicationTimeoutType() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;
     * @return The applicationTimeoutType.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType() {
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto result =
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(applicationTimeoutType_);
      return result == null
          ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME
          : result;
    }

    public static final int APPLICATION_TIMEOUT_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto applicationTimeout_;
    /**
     * optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;
     * @return Whether the applicationTimeout field is set.
     */
    @java.lang.Override
    public boolean hasApplicationTimeout() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;
     * @return The applicationTimeout.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto getApplicationTimeout() {
      return applicationTimeout_ == null
          ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.getDefaultInstance()
          : applicationTimeout_;
    }
    /**
     * optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProtoOrBuilder getApplicationTimeoutOrBuilder() {
      return applicationTimeout_ == null
          ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.getDefaultInstance()
          : applicationTimeout_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      if (hasApplicationTimeout()) {
        if (!getApplicationTimeout().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeEnum(1, applicationTimeoutType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getApplicationTimeout());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
            .computeEnumSize(1, applicationTimeoutType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
            .computeMessageSize(2, getApplicationTimeout());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto other = (org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto) obj;

      if (hasApplicationTimeoutType() != other.hasApplicationTimeoutType()) return false;
      if (hasApplicationTimeoutType()) {
        if (applicationTimeoutType_ != other.applicationTimeoutType_) return false;
      }
      if (hasApplicationTimeout() != other.hasApplicationTimeout()) return false;
      if (hasApplicationTimeout()) {
        if (!getApplicationTimeout()
            .equals(other.getApplicationTimeout())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationTimeoutType()) {
        hash = (37 * hash) + APPLICATION_TIMEOUT_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + applicationTimeoutType_;
      }
      if (hasApplicationTimeout()) {
        hash = (37 * hash) + APPLICATION_TIMEOUT_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationTimeout().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
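    /*
     * Editor's note: parseDelimitedFrom (above) reads a varint length prefix and
     * then exactly that many bytes, so several messages can share one stream:
     *
     *   msg.writeDelimitedTo(out);                                  // from MessageLite
     *   AppTimeoutsMapProto first = AppTimeoutsMapProto.parseDelimitedFrom(in);
     *   // parseDelimitedFrom returns null at a clean end-of-stream
     */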
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.AppTimeoutsMapProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.AppTimeoutsMapProto)
        org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AppTimeoutsMapProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AppTimeoutsMapProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationTimeoutFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationTimeoutType_ = 1;
        applicationTimeout_ = null;
        if (applicationTimeoutBuilder_ != null) {
          applicationTimeoutBuilder_.dispose();
          applicationTimeoutBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AppTimeoutsMapProto_descriptor;
      }
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationTimeoutType_ = applicationTimeoutType_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.applicationTimeout_ = applicationTimeoutBuilder_ == null
              ? applicationTimeout_
              : applicationTimeoutBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.getDefaultInstance()) return this;
        if (other.hasApplicationTimeoutType()) {
          setApplicationTimeoutType(other.getApplicationTimeoutType());
        }
        if (other.hasApplicationTimeout()) {
          mergeApplicationTimeout(other.getApplicationTimeout());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasApplicationTimeout()) {
          if (!getApplicationTimeout().isInitialized()) {
            return false;
          }
        }
        return true;
      }
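      /*
       * Editor's note: buildPartial0 (above) copies builder state into the
       * message using one has-bit per field, with the same masks on both sides:
       *
       *   (from_bitField0_ & 0x00000001) != 0  // builder has application_timeout_type
       *   (from_bitField0_ & 0x00000002) != 0  // builder has application_timeout
       *   result.bitField0_ |= to_bitField0_;  // publish the same bits on the message
       */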
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(1, tmpRaw);
                } else {
                  applicationTimeoutType_ = tmpRaw;
                  bitField0_ |= 0x00000001;
                }
                break;
              } // case 8
              case 18: {
                input.readMessage(
                    getApplicationTimeoutFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
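      /*
       * Editor's note: the case labels in the parse loop above follow the
       * protobuf wire format, tag = (field_number << 3) | wire_type:
       *
       *   field 1, varint  (wire type 0): (1 << 3) | 0 = 8   -> "case 8"
       *   field 2, message (wire type 2): (2 << 3) | 2 = 18  -> "case 18"
       *
       * Enum numbers with no matching constant are preserved through
       * mergeUnknownVarintField rather than being silently dropped.
       */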
      private int bitField0_;

      private int applicationTimeoutType_ = 1;
      /**
       * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;
       * @return Whether the applicationTimeoutType field is set.
       */
      @java.lang.Override public boolean hasApplicationTimeoutType() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;
       * @return The applicationTimeoutType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto result =
            org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(applicationTimeoutType_);
        return result == null
            ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME
            : result;
      }
      /**
       * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;
       * @param value The applicationTimeoutType to set.
       * @return This builder for chaining.
       */
      public Builder setApplicationTimeoutType(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto value) {
        if (value == null) { throw new NullPointerException(); }
        bitField0_ |= 0x00000001;
        applicationTimeoutType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;
       * @return This builder for chaining.
       */
      public Builder clearApplicationTimeoutType() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationTimeoutType_ = 1;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto applicationTimeout_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto,
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.Builder,
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProtoOrBuilder> applicationTimeoutBuilder_;
      /**
       * optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;
       * @return Whether the applicationTimeout field is set.
       */
      public boolean hasApplicationTimeout() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;
       * @return The applicationTimeout.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto getApplicationTimeout() {
        if (applicationTimeoutBuilder_ == null) {
          return applicationTimeout_ == null
              ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.getDefaultInstance()
              : applicationTimeout_;
        } else {
          return applicationTimeoutBuilder_.getMessage();
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;
       */
      public Builder setApplicationTimeout(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto value) {
        if (applicationTimeoutBuilder_ == null) {
          if (value == null) { throw new NullPointerException(); }
          applicationTimeout_ = value;
        } else {
          applicationTimeoutBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;
       */
      public Builder setApplicationTimeout(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.Builder builderForValue) {
        if (applicationTimeoutBuilder_ == null) {
          applicationTimeout_ = builderForValue.build();
        } else {
          applicationTimeoutBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;
       */
      public Builder mergeApplicationTimeout(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto value) {
        if (applicationTimeoutBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
              applicationTimeout_ != null &&
              applicationTimeout_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.getDefaultInstance()) {
            getApplicationTimeoutBuilder().mergeFrom(value);
          } else {
            applicationTimeout_ = value;
          }
        } else {
          applicationTimeoutBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;
       */
      public Builder clearApplicationTimeout() {
        bitField0_ = (bitField0_ & ~0x00000002);
        applicationTimeout_ = null;
        if (applicationTimeoutBuilder_ != null) {
          applicationTimeoutBuilder_.dispose();
          applicationTimeoutBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.Builder getApplicationTimeoutBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getApplicationTimeoutFieldBuilder().getBuilder();
      }
      /**
       * optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProtoOrBuilder getApplicationTimeoutOrBuilder() {
        if (applicationTimeoutBuilder_ != null) {
          return applicationTimeoutBuilder_.getMessageOrBuilder();
        } else {
          return applicationTimeout_ == null
              ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.getDefaultInstance()
              : applicationTimeout_;
        }
      }
      /**
       * optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto,
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.Builder,
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProtoOrBuilder>
          getApplicationTimeoutFieldBuilder() {
        if (applicationTimeoutBuilder_ == null) {
          applicationTimeoutBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto,
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.Builder,
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProtoOrBuilder>(
                  getApplicationTimeout(),
                  getParentForChildren(),
                  isClean());
          applicationTimeout_ = null;
        }
        return applicationTimeoutBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.AppTimeoutsMapProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.AppTimeoutsMapProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<AppTimeoutsMapProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<AppTimeoutsMapProto>() {
      @java.lang.Override
      public AppTimeoutsMapProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<AppTimeoutsMapProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<AppTimeoutsMapProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
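  /*
   * Editor's note: a minimal construction sketch for the message completed
   * above, using the builder overload that accepts a nested builder; the
   * setRemainingTime setter is assumed from the field declared in the
   * OrBuilder interface below:
   *
   *   AppTimeoutsMapProto entry = AppTimeoutsMapProto.newBuilder()
   *       .setApplicationTimeoutType(
   *           ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME)
   *       .setApplicationTimeout(ApplicationTimeoutProto.newBuilder()
   *           .setApplicationTimeoutType(
   *               ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME) // required below
   *           .setRemainingTime(60000L))
   *       .build();
   */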
  public interface ApplicationTimeoutProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationTimeoutProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * required .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;
     * @return Whether the applicationTimeoutType field is set.
     */
    boolean hasApplicationTimeoutType();
    /**
     * required .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;
     * @return The applicationTimeoutType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType();

    /**
     * optional string expire_time = 2;
     * @return Whether the expireTime field is set.
     */
    boolean hasExpireTime();
    /**
     * optional string expire_time = 2;
     * @return The expireTime.
     */
    java.lang.String getExpireTime();
    /**
     * optional string expire_time = 2;
     * @return The bytes for expireTime.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getExpireTimeBytes();

    /**
     * optional int64 remaining_time = 3;
     * @return Whether the remainingTime field is set.
     */
    boolean hasRemainingTime();
    /**
     * optional int64 remaining_time = 3;
     * @return The remainingTime.
     */
    long getRemainingTime();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ApplicationTimeoutProto}
   */
  public static final class ApplicationTimeoutProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationTimeoutProto)
      ApplicationTimeoutProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ApplicationTimeoutProto.newBuilder() to construct.
    private ApplicationTimeoutProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ApplicationTimeoutProto() {
      applicationTimeoutType_ = 1;
      expireTime_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ApplicationTimeoutProto();
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_TIMEOUT_TYPE_FIELD_NUMBER = 1;
    private int applicationTimeoutType_ = 1;
    /**
     * required .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;
     * @return Whether the applicationTimeoutType field is set.
     */
    @java.lang.Override public boolean hasApplicationTimeoutType() {
      return ((bitField0_ & 0x00000001) != 0);
    }
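    /*
     * Editor's note: unlike the other fields in this section,
     * application_timeout_type is a proto2 "required" field, so
     * isInitialized() (further below) returns false until it is set; build()
     * then throws via newUninitializedMessageException, while buildPartial()
     * skips the check.
     */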
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(applicationTimeoutType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME : result; } public static final int EXPIRE_TIME_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object expireTime_ = ""; /** * optional string expire_time = 2; * @return Whether the expireTime field is set. */ @java.lang.Override public boolean hasExpireTime() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string expire_time = 2; * @return The expireTime. */ @java.lang.Override public java.lang.String getExpireTime() { java.lang.Object ref = expireTime_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { expireTime_ = s; } return s; } } /** * optional string expire_time = 2; * @return The bytes for expireTime. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getExpireTimeBytes() { java.lang.Object ref = expireTime_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); expireTime_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int REMAINING_TIME_FIELD_NUMBER = 3; private long remainingTime_ = 0L; /** * optional int64 remaining_time = 3; * @return Whether the remainingTime field is set. */ @java.lang.Override public boolean hasRemainingTime() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int64 remaining_time = 3; * @return The remainingTime. 
*/ @java.lang.Override public long getRemainingTime() { return remainingTime_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasApplicationTimeoutType()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeEnum(1, applicationTimeoutType_); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, expireTime_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeInt64(3, remainingTime_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(1, applicationTimeoutType_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, expireTime_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(3, remainingTime_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto) obj; if (hasApplicationTimeoutType() != other.hasApplicationTimeoutType()) return false; if (hasApplicationTimeoutType()) { if (applicationTimeoutType_ != other.applicationTimeoutType_) return false; } if (hasExpireTime() != other.hasExpireTime()) return false; if (hasExpireTime()) { if (!getExpireTime() .equals(other.getExpireTime())) return false; } if (hasRemainingTime() != other.hasRemainingTime()) return false; if (hasRemainingTime()) { if (getRemainingTime() != other.getRemainingTime()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationTimeoutType()) { hash = (37 * hash) + APPLICATION_TIMEOUT_TYPE_FIELD_NUMBER; hash = (53 * hash) + applicationTimeoutType_; } if (hasExpireTime()) { hash = (37 * hash) + EXPIRE_TIME_FIELD_NUMBER; hash = (53 * hash) + getExpireTime().hashCode(); } if (hasRemainingTime()) { hash = (37 * hash) + REMAINING_TIME_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getRemainingTime()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
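// Usage sketch for the message above: building an ApplicationTimeoutProto and
// round-tripping it through the wire format. The expire-time string and the
// 3600000L remaining time are illustrative placeholder values, not values
// mandated by the .proto definition; toByteArray() comes from the protobuf
// runtime's MessageLite API.
//
//   YarnProtos.ApplicationTimeoutProto timeout =
//       YarnProtos.ApplicationTimeoutProto.newBuilder()
//           .setApplicationTimeoutType(
//               YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME)
//           .setExpireTime("2024-01-01T00:00:00.000+0000")
//           .setRemainingTime(3600000L)
//           .build();
//   byte[] bytes = timeout.toByteArray();
//   YarnProtos.ApplicationTimeoutProto parsed =
//       YarnProtos.ApplicationTimeoutProto.parseFrom(bytes);
//   // parsed.equals(timeout) holds: equals() above compares the three
//   // fields and the unknown field set.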
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ApplicationTimeoutProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationTimeoutProto) org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; applicationTimeoutType_ = 1; expireTime_ = ""; remainingTime_ = 0L; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 
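// The Builder's build() enforces the required field declared in the .proto:
// application_timeout_type must be set or build() throws; buildPartial()
// skips that check. A minimal sketch of the difference:
//
//   YarnProtos.ApplicationTimeoutProto.Builder b =
//       YarnProtos.ApplicationTimeoutProto.newBuilder();
//   b.buildPartial();           // succeeds; hasApplicationTimeoutType() is false
//   // b.build();               // would throw an UninitializedMessageException
//   b.setApplicationTimeoutType(
//       YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME);
//   b.build();                  // succeeds now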
0x00000001) != 0)) {
          result.applicationTimeoutType_ = applicationTimeoutType_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.expireTime_ = expireTime_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.remainingTime_ = remainingTime_;
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.getDefaultInstance()) return this;
        if (other.hasApplicationTimeoutType()) {
          setApplicationTimeoutType(other.getApplicationTimeoutType());
        }
        if (other.hasExpireTime()) {
          expireTime_ = other.expireTime_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasRemainingTime()) {
          setRemainingTime(other.getRemainingTime());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasApplicationTimeoutType()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(1, tmpRaw);
                } else {
                  applicationTimeoutType_ = tmpRaw;
                  bitField0_ |= 0x00000001;
                }
                break;
              } // case 8
              case 18: {
                expireTime_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 24: {
                remainingTime_ = input.readInt64();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch
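// How the case labels in mergeFrom() above map to proto fields: a tag is
// (field_number << 3) | wire_type. Field 1 (enum, varint wire type 0)
// gives (1 << 3) | 0 = 8; field 2 (string, length-delimited wire type 2)
// gives (2 << 3) | 2 = 18; field 3 (int64, varint) gives (3 << 3) | 0 = 24,
// hence case 8, case 18, and case 24. An enum value unknown to this binary
// is preserved via mergeUnknownVarintField(1, tmpRaw) rather than dropped.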
(tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int applicationTimeoutType_ = 1; /** * required .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1; * @return Whether the applicationTimeoutType field is set. */ @java.lang.Override public boolean hasApplicationTimeoutType() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1; * @return The applicationTimeoutType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(applicationTimeoutType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME : result; } /** * required .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1; * @param value The applicationTimeoutType to set. * @return This builder for chaining. */ public Builder setApplicationTimeoutType(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; applicationTimeoutType_ = value.getNumber(); onChanged(); return this; } /** * required .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1; * @return This builder for chaining. */ public Builder clearApplicationTimeoutType() { bitField0_ = (bitField0_ & ~0x00000001); applicationTimeoutType_ = 1; onChanged(); return this; } private java.lang.Object expireTime_ = ""; /** * optional string expire_time = 2; * @return Whether the expireTime field is set. */ public boolean hasExpireTime() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string expire_time = 2; * @return The expireTime. */ public java.lang.String getExpireTime() { java.lang.Object ref = expireTime_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { expireTime_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string expire_time = 2; * @return The bytes for expireTime. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getExpireTimeBytes() { java.lang.Object ref = expireTime_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); expireTime_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string expire_time = 2; * @param value The expireTime to set. * @return This builder for chaining. */ public Builder setExpireTime( java.lang.String value) { if (value == null) { throw new NullPointerException(); } expireTime_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional string expire_time = 2; * @return This builder for chaining. 
*/ public Builder clearExpireTime() { expireTime_ = getDefaultInstance().getExpireTime(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * optional string expire_time = 2; * @param value The bytes for expireTime to set. * @return This builder for chaining. */ public Builder setExpireTimeBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } expireTime_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private long remainingTime_ ; /** * optional int64 remaining_time = 3; * @return Whether the remainingTime field is set. */ @java.lang.Override public boolean hasRemainingTime() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int64 remaining_time = 3; * @return The remainingTime. */ @java.lang.Override public long getRemainingTime() { return remainingTime_; } /** * optional int64 remaining_time = 3; * @param value The remainingTime to set. * @return This builder for chaining. */ public Builder setRemainingTime(long value) { remainingTime_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional int64 remaining_time = 3; * @return This builder for chaining. */ public Builder clearRemainingTime() { bitField0_ = (bitField0_ & ~0x00000004); remainingTime_ = 0L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationTimeoutProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationTimeoutProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ApplicationTimeoutProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public 
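// Parsing sketch: because application_timeout_type is required, serialized
// input that lacks it fails to parse. The two-byte array below is an assumed
// example of such input (field 3 only, remaining_time = 1), not captured data.
//
//   try {
//     YarnProtos.ApplicationTimeoutProto.parseFrom(new byte[] {0x18, 0x01});
//   } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
//     // PARSER above converts the UninitializedMessageException into an
//     // InvalidProtocolBufferException carrying the partial message.
//   }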
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ApplicationAttemptReportProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationAttemptReportProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; * @return Whether the applicationAttemptId field is set. */ boolean hasApplicationAttemptId(); /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; * @return The applicationAttemptId. */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId(); /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder(); /** * optional string host = 2; * @return Whether the host field is set. */ boolean hasHost(); /** * optional string host = 2; * @return The host. */ java.lang.String getHost(); /** * optional string host = 2; * @return The bytes for host. */ org.apache.hadoop.thirdparty.protobuf.ByteString getHostBytes(); /** * optional int32 rpc_port = 3; * @return Whether the rpcPort field is set. */ boolean hasRpcPort(); /** * optional int32 rpc_port = 3; * @return The rpcPort. */ int getRpcPort(); /** * optional string tracking_url = 4; * @return Whether the trackingUrl field is set. */ boolean hasTrackingUrl(); /** * optional string tracking_url = 4; * @return The trackingUrl. */ java.lang.String getTrackingUrl(); /** * optional string tracking_url = 4; * @return The bytes for trackingUrl. */ org.apache.hadoop.thirdparty.protobuf.ByteString getTrackingUrlBytes(); /** * optional string diagnostics = 5 [default = "N/A"]; * @return Whether the diagnostics field is set. */ boolean hasDiagnostics(); /** * optional string diagnostics = 5 [default = "N/A"]; * @return The diagnostics. */ java.lang.String getDiagnostics(); /** * optional string diagnostics = 5 [default = "N/A"]; * @return The bytes for diagnostics. */ org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsBytes(); /** * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 6; * @return Whether the yarnApplicationAttemptState field is set. */ boolean hasYarnApplicationAttemptState(); /** * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 6; * @return The yarnApplicationAttemptState. */ org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto getYarnApplicationAttemptState(); /** * optional .hadoop.yarn.ContainerIdProto am_container_id = 7; * @return Whether the amContainerId field is set. */ boolean hasAmContainerId(); /** * optional .hadoop.yarn.ContainerIdProto am_container_id = 7; * @return The amContainerId. */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getAmContainerId(); /** * optional .hadoop.yarn.ContainerIdProto am_container_id = 7; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getAmContainerIdOrBuilder(); /** * optional string original_tracking_url = 8; * @return Whether the originalTrackingUrl field is set. */ boolean hasOriginalTrackingUrl(); /** * optional string original_tracking_url = 8; * @return The originalTrackingUrl. 
*/ java.lang.String getOriginalTrackingUrl(); /** * optional string original_tracking_url = 8; * @return The bytes for originalTrackingUrl. */ org.apache.hadoop.thirdparty.protobuf.ByteString getOriginalTrackingUrlBytes(); /** * optional int64 startTime = 9; * @return Whether the startTime field is set. */ boolean hasStartTime(); /** * optional int64 startTime = 9; * @return The startTime. */ long getStartTime(); /** * optional int64 finishTime = 10; * @return Whether the finishTime field is set. */ boolean hasFinishTime(); /** * optional int64 finishTime = 10; * @return The finishTime. */ long getFinishTime(); } /** * Protobuf type {@code hadoop.yarn.ApplicationAttemptReportProto} */ public static final class ApplicationAttemptReportProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationAttemptReportProto) ApplicationAttemptReportProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ApplicationAttemptReportProto.newBuilder() to construct. private ApplicationAttemptReportProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ApplicationAttemptReportProto() { host_ = ""; trackingUrl_ = ""; diagnostics_ = "N/A"; yarnApplicationAttemptState_ = 1; originalTrackingUrl_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ApplicationAttemptReportProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptReportProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptReportProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder.class); } private int bitField0_; public static final int APPLICATION_ATTEMPT_ID_FIELD_NUMBER = 1; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_; /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; * @return Whether the applicationAttemptId field is set. */ @java.lang.Override public boolean hasApplicationAttemptId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; * @return The applicationAttemptId. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() { return applicationAttemptId_ == null ? 
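// Every field of ApplicationAttemptReportProto is optional, so callers are
// expected to consult the has-methods before trusting getter values; the
// getters fall back to defaults ("" for strings, 0 for numbers, "N/A" for
// diagnostics) when a field was never set. Sketch, with `report` standing
// for some received instance:
//
//   if (report.hasAmContainerId()) {
//     YarnProtos.ContainerIdProto am = report.getAmContainerId();
//     // ...
//   }
//   String diag = report.getDiagnostics(); // "N/A" if never set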
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() { return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_; } public static final int HOST_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object host_ = ""; /** * optional string host = 2; * @return Whether the host field is set. */ @java.lang.Override public boolean hasHost() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string host = 2; * @return The host. */ @java.lang.Override public java.lang.String getHost() { java.lang.Object ref = host_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { host_ = s; } return s; } } /** * optional string host = 2; * @return The bytes for host. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getHostBytes() { java.lang.Object ref = host_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); host_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int RPC_PORT_FIELD_NUMBER = 3; private int rpcPort_ = 0; /** * optional int32 rpc_port = 3; * @return Whether the rpcPort field is set. */ @java.lang.Override public boolean hasRpcPort() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int32 rpc_port = 3; * @return The rpcPort. */ @java.lang.Override public int getRpcPort() { return rpcPort_; } public static final int TRACKING_URL_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object trackingUrl_ = ""; /** * optional string tracking_url = 4; * @return Whether the trackingUrl field is set. */ @java.lang.Override public boolean hasTrackingUrl() { return ((bitField0_ & 0x00000008) != 0); } /** * optional string tracking_url = 4; * @return The trackingUrl. */ @java.lang.Override public java.lang.String getTrackingUrl() { java.lang.Object ref = trackingUrl_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { trackingUrl_ = s; } return s; } } /** * optional string tracking_url = 4; * @return The bytes for trackingUrl. 
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getTrackingUrlBytes() { java.lang.Object ref = trackingUrl_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); trackingUrl_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int DIAGNOSTICS_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object diagnostics_ = "N/A"; /** * optional string diagnostics = 5 [default = "N/A"]; * @return Whether the diagnostics field is set. */ @java.lang.Override public boolean hasDiagnostics() { return ((bitField0_ & 0x00000010) != 0); } /** * optional string diagnostics = 5 [default = "N/A"]; * @return The diagnostics. */ @java.lang.Override public java.lang.String getDiagnostics() { java.lang.Object ref = diagnostics_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { diagnostics_ = s; } return s; } } /** * optional string diagnostics = 5 [default = "N/A"]; * @return The bytes for diagnostics. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsBytes() { java.lang.Object ref = diagnostics_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); diagnostics_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int YARN_APPLICATION_ATTEMPT_STATE_FIELD_NUMBER = 6; private int yarnApplicationAttemptState_ = 1; /** * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 6; * @return Whether the yarnApplicationAttemptState field is set. */ @java.lang.Override public boolean hasYarnApplicationAttemptState() { return ((bitField0_ & 0x00000020) != 0); } /** * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 6; * @return The yarnApplicationAttemptState. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto getYarnApplicationAttemptState() { org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.forNumber(yarnApplicationAttemptState_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.APP_ATTEMPT_NEW : result; } public static final int AM_CONTAINER_ID_FIELD_NUMBER = 7; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto amContainerId_; /** * optional .hadoop.yarn.ContainerIdProto am_container_id = 7; * @return Whether the amContainerId field is set. */ @java.lang.Override public boolean hasAmContainerId() { return ((bitField0_ & 0x00000040) != 0); } /** * optional .hadoop.yarn.ContainerIdProto am_container_id = 7; * @return The amContainerId. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getAmContainerId() { return amContainerId_ == null ? 
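// The string getters above implement protobuf's lazy UTF-8 decoding: each
// string field is stored as a plain Object holding whichever form arrived
// first (a String from a setter, a ByteString from the wire). The first
// getDiagnostics() call decodes the ByteString and, when it is valid UTF-8,
// caches the String back into the field, so later calls return the cached
// String without re-decoding; getDiagnosticsBytes() performs the
// mirror-image conversion and caches the ByteString.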
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : amContainerId_; } /** * optional .hadoop.yarn.ContainerIdProto am_container_id = 7; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getAmContainerIdOrBuilder() { return amContainerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : amContainerId_; } public static final int ORIGINAL_TRACKING_URL_FIELD_NUMBER = 8; @SuppressWarnings("serial") private volatile java.lang.Object originalTrackingUrl_ = ""; /** * optional string original_tracking_url = 8; * @return Whether the originalTrackingUrl field is set. */ @java.lang.Override public boolean hasOriginalTrackingUrl() { return ((bitField0_ & 0x00000080) != 0); } /** * optional string original_tracking_url = 8; * @return The originalTrackingUrl. */ @java.lang.Override public java.lang.String getOriginalTrackingUrl() { java.lang.Object ref = originalTrackingUrl_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { originalTrackingUrl_ = s; } return s; } } /** * optional string original_tracking_url = 8; * @return The bytes for originalTrackingUrl. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getOriginalTrackingUrlBytes() { java.lang.Object ref = originalTrackingUrl_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); originalTrackingUrl_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int STARTTIME_FIELD_NUMBER = 9; private long startTime_ = 0L; /** * optional int64 startTime = 9; * @return Whether the startTime field is set. */ @java.lang.Override public boolean hasStartTime() { return ((bitField0_ & 0x00000100) != 0); } /** * optional int64 startTime = 9; * @return The startTime. */ @java.lang.Override public long getStartTime() { return startTime_; } public static final int FINISHTIME_FIELD_NUMBER = 10; private long finishTime_ = 0L; /** * optional int64 finishTime = 10; * @return Whether the finishTime field is set. */ @java.lang.Override public boolean hasFinishTime() { return ((bitField0_ & 0x00000200) != 0); } /** * optional int64 finishTime = 10; * @return The finishTime. 
*/ @java.lang.Override public long getFinishTime() { return finishTime_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApplicationAttemptId()); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, host_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeInt32(3, rpcPort_); } if (((bitField0_ & 0x00000008) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, trackingUrl_); } if (((bitField0_ & 0x00000010) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, diagnostics_); } if (((bitField0_ & 0x00000020) != 0)) { output.writeEnum(6, yarnApplicationAttemptState_); } if (((bitField0_ & 0x00000040) != 0)) { output.writeMessage(7, getAmContainerId()); } if (((bitField0_ & 0x00000080) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 8, originalTrackingUrl_); } if (((bitField0_ & 0x00000100) != 0)) { output.writeInt64(9, startTime_); } if (((bitField0_ & 0x00000200) != 0)) { output.writeInt64(10, finishTime_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getApplicationAttemptId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, host_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(3, rpcPort_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, trackingUrl_); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, diagnostics_); } if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(6, yarnApplicationAttemptState_); } if (((bitField0_ & 0x00000040) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(7, getAmContainerId()); } if (((bitField0_ & 0x00000080) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(8, originalTrackingUrl_); } if (((bitField0_ & 0x00000100) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(9, startTime_); } if (((bitField0_ & 0x00000200) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(10, finishTime_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto other = 
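// getSerializedSize() above is memoized: the first call walks every set
// field, sums tag plus payload sizes, and caches the total in memoizedSize;
// writeTo() then streams the fields in field-number order. A sketch of the
// usual serialization path, where `report` is an assumed
// ApplicationAttemptReportProto instance:
//
//   int n = report.getSerializedSize();   // computed once, then cached
//   byte[] out = report.toByteArray();    // out.length == n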
(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto) obj; if (hasApplicationAttemptId() != other.hasApplicationAttemptId()) return false; if (hasApplicationAttemptId()) { if (!getApplicationAttemptId() .equals(other.getApplicationAttemptId())) return false; } if (hasHost() != other.hasHost()) return false; if (hasHost()) { if (!getHost() .equals(other.getHost())) return false; } if (hasRpcPort() != other.hasRpcPort()) return false; if (hasRpcPort()) { if (getRpcPort() != other.getRpcPort()) return false; } if (hasTrackingUrl() != other.hasTrackingUrl()) return false; if (hasTrackingUrl()) { if (!getTrackingUrl() .equals(other.getTrackingUrl())) return false; } if (hasDiagnostics() != other.hasDiagnostics()) return false; if (hasDiagnostics()) { if (!getDiagnostics() .equals(other.getDiagnostics())) return false; } if (hasYarnApplicationAttemptState() != other.hasYarnApplicationAttemptState()) return false; if (hasYarnApplicationAttemptState()) { if (yarnApplicationAttemptState_ != other.yarnApplicationAttemptState_) return false; } if (hasAmContainerId() != other.hasAmContainerId()) return false; if (hasAmContainerId()) { if (!getAmContainerId() .equals(other.getAmContainerId())) return false; } if (hasOriginalTrackingUrl() != other.hasOriginalTrackingUrl()) return false; if (hasOriginalTrackingUrl()) { if (!getOriginalTrackingUrl() .equals(other.getOriginalTrackingUrl())) return false; } if (hasStartTime() != other.hasStartTime()) return false; if (hasStartTime()) { if (getStartTime() != other.getStartTime()) return false; } if (hasFinishTime() != other.hasFinishTime()) return false; if (hasFinishTime()) { if (getFinishTime() != other.getFinishTime()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationAttemptId()) { hash = (37 * hash) + APPLICATION_ATTEMPT_ID_FIELD_NUMBER; hash = (53 * hash) + getApplicationAttemptId().hashCode(); } if (hasHost()) { hash = (37 * hash) + HOST_FIELD_NUMBER; hash = (53 * hash) + getHost().hashCode(); } if (hasRpcPort()) { hash = (37 * hash) + RPC_PORT_FIELD_NUMBER; hash = (53 * hash) + getRpcPort(); } if (hasTrackingUrl()) { hash = (37 * hash) + TRACKING_URL_FIELD_NUMBER; hash = (53 * hash) + getTrackingUrl().hashCode(); } if (hasDiagnostics()) { hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER; hash = (53 * hash) + getDiagnostics().hashCode(); } if (hasYarnApplicationAttemptState()) { hash = (37 * hash) + YARN_APPLICATION_ATTEMPT_STATE_FIELD_NUMBER; hash = (53 * hash) + yarnApplicationAttemptState_; } if (hasAmContainerId()) { hash = (37 * hash) + AM_CONTAINER_ID_FIELD_NUMBER; hash = (53 * hash) + getAmContainerId().hashCode(); } if (hasOriginalTrackingUrl()) { hash = (37 * hash) + ORIGINAL_TRACKING_URL_FIELD_NUMBER; hash = (53 * hash) + getOriginalTrackingUrl().hashCode(); } if (hasStartTime()) { hash = (37 * hash) + STARTTIME_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getStartTime()); } if (hasFinishTime()) { hash = (37 * hash) + FINISHTIME_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getFinishTime()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
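// parseDelimitedFrom() pairs with the runtime's writeDelimitedTo(): each
// message is framed with a varint length prefix, which is what lets several
// reports be read back-to-back from one stream. A sketch, assuming `in` is a
// java.io.InputStream positioned at such a sequence:
//
//   YarnProtos.ApplicationAttemptReportProto r;
//   while ((r = YarnProtos.ApplicationAttemptReportProto
//       .parseDelimitedFrom(in)) != null) {
//     // parseDelimitedFrom returns null at end of stream
//   }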
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ApplicationAttemptReportProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationAttemptReportProto) org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptReportProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptReportProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationAttemptIdFieldBuilder(); getAmContainerIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; applicationAttemptId_ = null; if (applicationAttemptIdBuilder_ != null) { applicationAttemptIdBuilder_.dispose(); applicationAttemptIdBuilder_ = null; } host_ = ""; rpcPort_ = 0; trackingUrl_ = ""; diagnostics_ = "N/A"; yarnApplicationAttemptState_ = 1; amContainerId_ = null; if (amContainerIdBuilder_ != null) { amContainerIdBuilder_.dispose(); amContainerIdBuilder_ = null; } originalTrackingUrl_ = ""; startTime_ = 0L; finishTime_ = 0L; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptReportProto_descriptor; } @java.lang.Override public 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.applicationAttemptId_ = applicationAttemptIdBuilder_ == null ? applicationAttemptId_ : applicationAttemptIdBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.host_ = host_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.rpcPort_ = rpcPort_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.trackingUrl_ = trackingUrl_; to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.diagnostics_ = diagnostics_; to_bitField0_ |= 0x00000010; } if (((from_bitField0_ & 0x00000020) != 0)) { result.yarnApplicationAttemptState_ = yarnApplicationAttemptState_; to_bitField0_ |= 0x00000020; } if (((from_bitField0_ & 0x00000040) != 0)) { result.amContainerId_ = amContainerIdBuilder_ == null ? 
amContainerId_ : amContainerIdBuilder_.build(); to_bitField0_ |= 0x00000040; } if (((from_bitField0_ & 0x00000080) != 0)) { result.originalTrackingUrl_ = originalTrackingUrl_; to_bitField0_ |= 0x00000080; } if (((from_bitField0_ & 0x00000100) != 0)) { result.startTime_ = startTime_; to_bitField0_ |= 0x00000100; } if (((from_bitField0_ & 0x00000200) != 0)) { result.finishTime_ = finishTime_; to_bitField0_ |= 0x00000200; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance()) return this; if (other.hasApplicationAttemptId()) { mergeApplicationAttemptId(other.getApplicationAttemptId()); } if (other.hasHost()) { host_ = other.host_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasRpcPort()) { setRpcPort(other.getRpcPort()); } if (other.hasTrackingUrl()) { trackingUrl_ = other.trackingUrl_; bitField0_ |= 0x00000008; onChanged(); } if (other.hasDiagnostics()) { diagnostics_ = other.diagnostics_; bitField0_ |= 0x00000010; onChanged(); } if (other.hasYarnApplicationAttemptState()) { setYarnApplicationAttemptState(other.getYarnApplicationAttemptState()); } if (other.hasAmContainerId()) { mergeAmContainerId(other.getAmContainerId()); } if (other.hasOriginalTrackingUrl()) { originalTrackingUrl_ = other.originalTrackingUrl_; bitField0_ |= 0x00000080; onChanged(); } if (other.hasStartTime()) { setStartTime(other.getStartTime()); } if (other.hasFinishTime()) { setFinishTime(other.getFinishTime()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = 
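// mergeFrom(other) above implements protobuf merge semantics: scalar and
// string fields set in `other` overwrite this builder's values, while the
// message-typed fields (application_attempt_id, am_container_id) are merged
// recursively via mergeApplicationAttemptId / mergeAmContainerId rather than
// replaced wholesale. Sketch, where `base` and `overlay` are assumed
// existing report instances:
//
//   YarnProtos.ApplicationAttemptReportProto merged =
//       YarnProtos.ApplicationAttemptReportProto.newBuilder(base)
//           .mergeFrom(overlay)
//           .build();
//
// Fields unset in `overlay` keep their `base` values.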
true; break; case 10: { input.readMessage( getApplicationAttemptIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { host_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { rpcPort_ = input.readInt32(); bitField0_ |= 0x00000004; break; } // case 24 case 34: { trackingUrl_ = input.readBytes(); bitField0_ |= 0x00000008; break; } // case 34 case 42: { diagnostics_ = input.readBytes(); bitField0_ |= 0x00000010; break; } // case 42 case 48: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(6, tmpRaw); } else { yarnApplicationAttemptState_ = tmpRaw; bitField0_ |= 0x00000020; } break; } // case 48 case 58: { input.readMessage( getAmContainerIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000040; break; } // case 58 case 66: { originalTrackingUrl_ = input.readBytes(); bitField0_ |= 0x00000080; break; } // case 66 case 72: { startTime_ = input.readInt64(); bitField0_ |= 0x00000100; break; } // case 72 case 80: { finishTime_ = input.readInt64(); bitField0_ |= 0x00000200; break; } // case 80 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> applicationAttemptIdBuilder_; /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; * @return Whether the applicationAttemptId field is set. */ public boolean hasApplicationAttemptId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; * @return The applicationAttemptId. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() { if (applicationAttemptIdBuilder_ == null) { return applicationAttemptId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_; } else { return applicationAttemptIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public Builder setApplicationAttemptId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) { if (applicationAttemptIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationAttemptId_ = value; } else { applicationAttemptIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public Builder setApplicationAttemptId( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) { if (applicationAttemptIdBuilder_ == null) { applicationAttemptId_ = builderForValue.build(); } else { applicationAttemptIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public Builder mergeApplicationAttemptId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) { if (applicationAttemptIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationAttemptId_ != null && applicationAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) { getApplicationAttemptIdBuilder().mergeFrom(value); } else { applicationAttemptId_ = value; } } else { applicationAttemptIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public Builder clearApplicationAttemptId() { bitField0_ = (bitField0_ & ~0x00000001); applicationAttemptId_ = null; if (applicationAttemptIdBuilder_ != null) { applicationAttemptIdBuilder_.dispose(); applicationAttemptIdBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getApplicationAttemptIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationAttemptIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() { if (applicationAttemptIdBuilder_ != null) { return applicationAttemptIdBuilder_.getMessageOrBuilder(); } else { return applicationAttemptId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_; } } /** * optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> getApplicationAttemptIdFieldBuilder() { if (applicationAttemptIdBuilder_ == null) { applicationAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>( getApplicationAttemptId(), getParentForChildren(), isClean()); applicationAttemptId_ = null; } return applicationAttemptIdBuilder_; } private java.lang.Object host_ = ""; /** * optional string host = 2; * @return Whether the host field is set. */ public boolean hasHost() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string host = 2; * @return The host. */ public java.lang.String getHost() { java.lang.Object ref = host_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { host_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string host = 2; * @return The bytes for host. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getHostBytes() { java.lang.Object ref = host_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); host_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string host = 2; * @param value The host to set. * @return This builder for chaining. */ public Builder setHost( java.lang.String value) { if (value == null) { throw new NullPointerException(); } host_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional string host = 2; * @return This builder for chaining. */ public Builder clearHost() { host_ = getDefaultInstance().getHost(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * optional string host = 2; * @param value The bytes for host to set. * @return This builder for chaining. */ public Builder setHostBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } host_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int rpcPort_ ; /** * optional int32 rpc_port = 3; * @return Whether the rpcPort field is set. */ @java.lang.Override public boolean hasRpcPort() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int32 rpc_port = 3; * @return The rpcPort. */ @java.lang.Override public int getRpcPort() { return rpcPort_; } /** * optional int32 rpc_port = 3; * @param value The rpcPort to set. * @return This builder for chaining. 
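       * <p>Illustrative sketch (not generated code; the port value is invented):
       * scalar setters record a presence bit that {@code hasRpcPort()} reports
       * and {@code clearRpcPort()} resets.
       * <pre>
       * ApplicationAttemptReportProto.Builder b =
       *     ApplicationAttemptReportProto.newBuilder().setRpcPort(8041);
       * assert b.hasRpcPort();
       * b.clearRpcPort();   // presence bit cleared, value back to 0
       * assert !b.hasRpcPort();
       * </pre>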
*/ public Builder setRpcPort(int value) { rpcPort_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional int32 rpc_port = 3; * @return This builder for chaining. */ public Builder clearRpcPort() { bitField0_ = (bitField0_ & ~0x00000004); rpcPort_ = 0; onChanged(); return this; } private java.lang.Object trackingUrl_ = ""; /** * optional string tracking_url = 4; * @return Whether the trackingUrl field is set. */ public boolean hasTrackingUrl() { return ((bitField0_ & 0x00000008) != 0); } /** * optional string tracking_url = 4; * @return The trackingUrl. */ public java.lang.String getTrackingUrl() { java.lang.Object ref = trackingUrl_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { trackingUrl_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string tracking_url = 4; * @return The bytes for trackingUrl. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getTrackingUrlBytes() { java.lang.Object ref = trackingUrl_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); trackingUrl_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string tracking_url = 4; * @param value The trackingUrl to set. * @return This builder for chaining. */ public Builder setTrackingUrl( java.lang.String value) { if (value == null) { throw new NullPointerException(); } trackingUrl_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional string tracking_url = 4; * @return This builder for chaining. */ public Builder clearTrackingUrl() { trackingUrl_ = getDefaultInstance().getTrackingUrl(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * optional string tracking_url = 4; * @param value The bytes for trackingUrl to set. * @return This builder for chaining. */ public Builder setTrackingUrlBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } trackingUrl_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } private java.lang.Object diagnostics_ = "N/A"; /** * optional string diagnostics = 5 [default = "N/A"]; * @return Whether the diagnostics field is set. */ public boolean hasDiagnostics() { return ((bitField0_ & 0x00000010) != 0); } /** * optional string diagnostics = 5 [default = "N/A"]; * @return The diagnostics. */ public java.lang.String getDiagnostics() { java.lang.Object ref = diagnostics_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { diagnostics_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string diagnostics = 5 [default = "N/A"]; * @return The bytes for diagnostics. 
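       * <p>Illustrative sketch: because the field declares [default = "N/A"],
       * an unset message still reads back that default.
       * <pre>
       * ApplicationAttemptReportProto r =
       *     ApplicationAttemptReportProto.newBuilder().build();
       * assert !r.hasDiagnostics();
       * assert "N/A".equals(r.getDiagnostics());
       * </pre>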
*/ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsBytes() { java.lang.Object ref = diagnostics_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); diagnostics_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string diagnostics = 5 [default = "N/A"]; * @param value The diagnostics to set. * @return This builder for chaining. */ public Builder setDiagnostics( java.lang.String value) { if (value == null) { throw new NullPointerException(); } diagnostics_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional string diagnostics = 5 [default = "N/A"]; * @return This builder for chaining. */ public Builder clearDiagnostics() { diagnostics_ = getDefaultInstance().getDiagnostics(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); return this; } /** * optional string diagnostics = 5 [default = "N/A"]; * @param value The bytes for diagnostics to set. * @return This builder for chaining. */ public Builder setDiagnosticsBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } diagnostics_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } private int yarnApplicationAttemptState_ = 1; /** * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 6; * @return Whether the yarnApplicationAttemptState field is set. */ @java.lang.Override public boolean hasYarnApplicationAttemptState() { return ((bitField0_ & 0x00000020) != 0); } /** * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 6; * @return The yarnApplicationAttemptState. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto getYarnApplicationAttemptState() { org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.forNumber(yarnApplicationAttemptState_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.APP_ATTEMPT_NEW : result; } /** * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 6; * @param value The yarnApplicationAttemptState to set. * @return This builder for chaining. */ public Builder setYarnApplicationAttemptState(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; yarnApplicationAttemptState_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 6; * @return This builder for chaining. 
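       * <p>Illustrative sketch (assumes APP_ATTEMPT_RUNNING is among the enum's
       * declared values): the builder stores the enum's wire number, and clearing
       * restores the default number 1, i.e. APP_ATTEMPT_NEW.
       * <pre>
       * ApplicationAttemptReportProto.Builder b = ApplicationAttemptReportProto.newBuilder()
       *     .setYarnApplicationAttemptState(
       *         YarnApplicationAttemptStateProto.APP_ATTEMPT_RUNNING);
       * assert b.hasYarnApplicationAttemptState();
       * </pre>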
*/ public Builder clearYarnApplicationAttemptState() { bitField0_ = (bitField0_ & ~0x00000020); yarnApplicationAttemptState_ = 1; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto amContainerId_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> amContainerIdBuilder_; /** * optional .hadoop.yarn.ContainerIdProto am_container_id = 7; * @return Whether the amContainerId field is set. */ public boolean hasAmContainerId() { return ((bitField0_ & 0x00000040) != 0); } /** * optional .hadoop.yarn.ContainerIdProto am_container_id = 7; * @return The amContainerId. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getAmContainerId() { if (amContainerIdBuilder_ == null) { return amContainerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : amContainerId_; } else { return amContainerIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ContainerIdProto am_container_id = 7; */ public Builder setAmContainerId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (amContainerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } amContainerId_ = value; } else { amContainerIdBuilder_.setMessage(value); } bitField0_ |= 0x00000040; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto am_container_id = 7; */ public Builder setAmContainerId( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (amContainerIdBuilder_ == null) { amContainerId_ = builderForValue.build(); } else { amContainerIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000040; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto am_container_id = 7; */ public Builder mergeAmContainerId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (amContainerIdBuilder_ == null) { if (((bitField0_ & 0x00000040) != 0) && amContainerId_ != null && amContainerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) { getAmContainerIdBuilder().mergeFrom(value); } else { amContainerId_ = value; } } else { amContainerIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000040; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto am_container_id = 7; */ public Builder clearAmContainerId() { bitField0_ = (bitField0_ & ~0x00000040); amContainerId_ = null; if (amContainerIdBuilder_ != null) { amContainerIdBuilder_.dispose(); amContainerIdBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto am_container_id = 7; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getAmContainerIdBuilder() { bitField0_ |= 0x00000040; onChanged(); return getAmContainerIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ContainerIdProto am_container_id = 7; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getAmContainerIdOrBuilder() { if (amContainerIdBuilder_ != null) { return amContainerIdBuilder_.getMessageOrBuilder(); } else { return amContainerId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : amContainerId_; } } /** * optional .hadoop.yarn.ContainerIdProto am_container_id = 7; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getAmContainerIdFieldBuilder() { if (amContainerIdBuilder_ == null) { amContainerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( getAmContainerId(), getParentForChildren(), isClean()); amContainerId_ = null; } return amContainerIdBuilder_; } private java.lang.Object originalTrackingUrl_ = ""; /** * optional string original_tracking_url = 8; * @return Whether the originalTrackingUrl field is set. */ public boolean hasOriginalTrackingUrl() { return ((bitField0_ & 0x00000080) != 0); } /** * optional string original_tracking_url = 8; * @return The originalTrackingUrl. */ public java.lang.String getOriginalTrackingUrl() { java.lang.Object ref = originalTrackingUrl_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { originalTrackingUrl_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string original_tracking_url = 8; * @return The bytes for originalTrackingUrl. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getOriginalTrackingUrlBytes() { java.lang.Object ref = originalTrackingUrl_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); originalTrackingUrl_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string original_tracking_url = 8; * @param value The originalTrackingUrl to set. * @return This builder for chaining. */ public Builder setOriginalTrackingUrl( java.lang.String value) { if (value == null) { throw new NullPointerException(); } originalTrackingUrl_ = value; bitField0_ |= 0x00000080; onChanged(); return this; } /** * optional string original_tracking_url = 8; * @return This builder for chaining. */ public Builder clearOriginalTrackingUrl() { originalTrackingUrl_ = getDefaultInstance().getOriginalTrackingUrl(); bitField0_ = (bitField0_ & ~0x00000080); onChanged(); return this; } /** * optional string original_tracking_url = 8; * @param value The bytes for originalTrackingUrl to set. * @return This builder for chaining. */ public Builder setOriginalTrackingUrlBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } originalTrackingUrl_ = value; bitField0_ |= 0x00000080; onChanged(); return this; } private long startTime_ ; /** * optional int64 startTime = 9; * @return Whether the startTime field is set. */ @java.lang.Override public boolean hasStartTime() { return ((bitField0_ & 0x00000100) != 0); } /** * optional int64 startTime = 9; * @return The startTime. 
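       * <p>Illustrative sketch (the timestamp is whatever the caller supplies;
       * YARN reports use epoch milliseconds here):
       * <pre>
       * long now = System.currentTimeMillis();
       * ApplicationAttemptReportProto r =
       *     ApplicationAttemptReportProto.newBuilder().setStartTime(now).build();
       * assert r.getStartTime() == now;
       * </pre>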
*/ @java.lang.Override public long getStartTime() { return startTime_; } /** * optional int64 startTime = 9; * @param value The startTime to set. * @return This builder for chaining. */ public Builder setStartTime(long value) { startTime_ = value; bitField0_ |= 0x00000100; onChanged(); return this; } /** * optional int64 startTime = 9; * @return This builder for chaining. */ public Builder clearStartTime() { bitField0_ = (bitField0_ & ~0x00000100); startTime_ = 0L; onChanged(); return this; } private long finishTime_ ; /** * optional int64 finishTime = 10; * @return Whether the finishTime field is set. */ @java.lang.Override public boolean hasFinishTime() { return ((bitField0_ & 0x00000200) != 0); } /** * optional int64 finishTime = 10; * @return The finishTime. */ @java.lang.Override public long getFinishTime() { return finishTime_; } /** * optional int64 finishTime = 10; * @param value The finishTime to set. * @return This builder for chaining. */ public Builder setFinishTime(long value) { finishTime_ = value; bitField0_ |= 0x00000200; onChanged(); return this; } /** * optional int64 finishTime = 10; * @return This builder for chaining. */ public Builder clearFinishTime() { bitField0_ = (bitField0_ & ~0x00000200); finishTime_ = 0L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationAttemptReportProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationAttemptReportProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ApplicationAttemptReportProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public 
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface NodeIdProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeIdProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional string host = 1; * @return Whether the host field is set. */ boolean hasHost(); /** * optional string host = 1; * @return The host. */ java.lang.String getHost(); /** * optional string host = 1; * @return The bytes for host. */ org.apache.hadoop.thirdparty.protobuf.ByteString getHostBytes(); /** * optional int32 port = 2; * @return Whether the port field is set. */ boolean hasPort(); /** * optional int32 port = 2; * @return The port. */ int getPort(); } /** * Protobuf type {@code hadoop.yarn.NodeIdProto} */ public static final class NodeIdProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeIdProto) NodeIdProtoOrBuilder { private static final long serialVersionUID = 0L; // Use NodeIdProto.newBuilder() to construct. private NodeIdProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private NodeIdProto() { host_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new NodeIdProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder.class); } private int bitField0_; public static final int HOST_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object host_ = ""; /** * optional string host = 1; * @return Whether the host field is set. */ @java.lang.Override public boolean hasHost() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string host = 1; * @return The host. */ @java.lang.Override public java.lang.String getHost() { java.lang.Object ref = host_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { host_ = s; } return s; } } /** * optional string host = 1; * @return The bytes for host. 
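     * <p>Illustrative sketch (host and port invented): a NodeIdProto is the
     * host/port pair that identifies a NodeManager.
     * <pre>
     * NodeIdProto node = NodeIdProto.newBuilder()
     *     .setHost("nm-1.example.com")
     *     .setPort(45454)
     *     .build();
     * </pre>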
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getHostBytes() { java.lang.Object ref = host_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); host_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int PORT_FIELD_NUMBER = 2; private int port_ = 0; /** * optional int32 port = 2; * @return Whether the port field is set. */ @java.lang.Override public boolean hasPort() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int32 port = 2; * @return The port. */ @java.lang.Override public int getPort() { return port_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, host_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt32(2, port_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, host_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(2, port_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto) obj; if (hasHost() != other.hasHost()) return false; if (hasHost()) { if (!getHost() .equals(other.getHost())) return false; } if (hasPort() != other.hasPort()) return false; if (hasPort()) { if (getPort() != other.getPort()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasHost()) { hash = (37 * hash) + HOST_FIELD_NUMBER; hash = (53 * hash) + getHost().hashCode(); } if (hasPort()) { hash = (37 * hash) + PORT_FIELD_NUMBER; hash = (53 * hash) + getPort(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto prototype) { 
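      // Seeds a fresh builder with every set field of the prototype via
      // toBuilder().mergeFrom(prototype), so callers can derive a modified
      // NodeIdProto without mutating the immutable original.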
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.NodeIdProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeIdProto) org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; host_ = ""; port_ = 0; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto build() { org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.host_ = host_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.port_ = port_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return 
super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) return this; if (other.hasHost()) { host_ = other.host_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasPort()) { setPort(other.getPort()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { host_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { port_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object host_ = ""; /** * optional string host = 1; * @return Whether the host field is set. */ public boolean hasHost() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string host = 1; * @return The host. */ public java.lang.String getHost() { java.lang.Object ref = host_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { host_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string host = 1; * @return The bytes for host. 
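       * <p>Illustrative round-trip sketch (values invented): the message
       * serializes to its wire form and parses back unchanged.
       * <pre>
       * byte[] wire = NodeIdProto.newBuilder()
       *     .setHost("nm-1.example.com").setPort(45454)
       *     .build().toByteArray();
       * NodeIdProto copy = NodeIdProto.parseFrom(wire);
       * assert copy.getPort() == 45454;
       * </pre>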
*/ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getHostBytes() { java.lang.Object ref = host_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); host_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string host = 1; * @param value The host to set. * @return This builder for chaining. */ public Builder setHost( java.lang.String value) { if (value == null) { throw new NullPointerException(); } host_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional string host = 1; * @return This builder for chaining. */ public Builder clearHost() { host_ = getDefaultInstance().getHost(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * optional string host = 1; * @param value The bytes for host to set. * @return This builder for chaining. */ public Builder setHostBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } host_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int port_ ; /** * optional int32 port = 2; * @return Whether the port field is set. */ @java.lang.Override public boolean hasPort() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int32 port = 2; * @return The port. */ @java.lang.Override public int getPort() { return port_; } /** * optional int32 port = 2; * @param value The port to set. * @return This builder for chaining. */ public Builder setPort(int value) { port_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional int32 port = 2; * @return This builder for chaining. 
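       * <p>Illustrative sketch: mergeFrom (above) copies only the fields that
       * are set on the other message, so unset fields never clobber existing
       * values.
       * <pre>
       * NodeIdProto hostOnly = NodeIdProto.newBuilder().setHost("nm-2").build();
       * NodeIdProto merged = NodeIdProto.newBuilder().setPort(45454)
       *     .mergeFrom(hostOnly).build();
       * assert merged.getPort() == 45454 && "nm-2".equals(merged.getHost());
       * </pre>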
*/ public Builder clearPort() { bitField0_ = (bitField0_ & ~0x00000002); port_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeIdProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeIdProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public NodeIdProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface NodeReportProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeReportProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; * @return Whether the nodeId field is set. */ boolean hasNodeId(); /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; * @return The nodeId. */ org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId(); /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder(); /** * optional string httpAddress = 2; * @return Whether the httpAddress field is set. */ boolean hasHttpAddress(); /** * optional string httpAddress = 2; * @return The httpAddress. */ java.lang.String getHttpAddress(); /** * optional string httpAddress = 2; * @return The bytes for httpAddress. 
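     * <p>Illustrative sketch: this interface is implemented by both the
     * immutable NodeReportProto and its Builder, so read-only helpers can
     * accept either.
     * <pre>
     * static String httpAddressOf(NodeReportProtoOrBuilder r) {
     *   return r.hasHttpAddress() ? r.getHttpAddress() : "unknown";
     * }
     * </pre>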
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHttpAddressBytes();

    /**
     * optional string rackName = 3;
     * @return Whether the rackName field is set.
     */
    boolean hasRackName();
    /**
     * optional string rackName = 3;
     * @return The rackName.
     */
    java.lang.String getRackName();
    /**
     * optional string rackName = 3;
     * @return The bytes for rackName.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getRackNameBytes();

    /**
     * optional .hadoop.yarn.ResourceProto used = 4;
     * @return Whether the used field is set.
     */
    boolean hasUsed();
    /**
     * optional .hadoop.yarn.ResourceProto used = 4;
     * @return The used.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getUsed();
    /**
     * optional .hadoop.yarn.ResourceProto used = 4;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getUsedOrBuilder();

    /**
     * optional .hadoop.yarn.ResourceProto capability = 5;
     * @return Whether the capability field is set.
     */
    boolean hasCapability();
    /**
     * optional .hadoop.yarn.ResourceProto capability = 5;
     * @return The capability.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability();
    /**
     * optional .hadoop.yarn.ResourceProto capability = 5;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder();

    /**
     * optional int32 numContainers = 6;
     * @return Whether the numContainers field is set.
     */
    boolean hasNumContainers();
    /**
     * optional int32 numContainers = 6;
     * @return The numContainers.
     */
    int getNumContainers();

    /**
     * optional .hadoop.yarn.NodeStateProto node_state = 7;
     * @return Whether the nodeState field is set.
     */
    boolean hasNodeState();
    /**
     * optional .hadoop.yarn.NodeStateProto node_state = 7;
     * @return The nodeState.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto getNodeState();

    /**
     * optional string health_report = 8;
     * @return Whether the healthReport field is set.
     */
    boolean hasHealthReport();
    /**
     * optional string health_report = 8;
     * @return The healthReport.
     */
    java.lang.String getHealthReport();
    /**
     * optional string health_report = 8;
     * @return The bytes for healthReport.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHealthReportBytes();

    /**
     * optional int64 last_health_report_time = 9;
     * @return Whether the lastHealthReportTime field is set.
     */
    boolean hasLastHealthReportTime();
    /**
     * optional int64 last_health_report_time = 9;
     * @return The lastHealthReportTime.
     */
    long getLastHealthReportTime();

    /**
     * repeated string node_labels = 10;
     * @return A list containing the nodeLabels.
     */
    java.util.List<java.lang.String>
        getNodeLabelsList();
    /**
     * repeated string node_labels = 10;
     * @return The count of nodeLabels.
     */
    int getNodeLabelsCount();
    /**
     * repeated string node_labels = 10;
     * @param index The index of the element to return.
     * @return The nodeLabels at the given index.
     */
    java.lang.String getNodeLabels(int index);
    /**
     * repeated string node_labels = 10;
     * @param index The index of the value to return.
     * @return The bytes of the nodeLabels at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeLabelsBytes(int index);

    /**
     * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;
     * @return Whether the containersUtilization field is set.
     */
    boolean hasContainersUtilization();
    /**
     * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;
     * @return The containersUtilization.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getContainersUtilization();
    /**
     * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getContainersUtilizationOrBuilder();

    /**
     * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;
     * @return Whether the nodeUtilization field is set.
     */
    boolean hasNodeUtilization();
    /**
     * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;
     * @return The nodeUtilization.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getNodeUtilization();
    /**
     * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getNodeUtilizationOrBuilder();

    /**
     * optional uint32 decommissioning_timeout = 13;
     * @return Whether the decommissioningTimeout field is set.
     */
    boolean hasDecommissioningTimeout();
    /**
     * optional uint32 decommissioning_timeout = 13;
     * @return The decommissioningTimeout.
     */
    int getDecommissioningTimeout();

    /**
     * optional .hadoop.yarn.NodeUpdateTypeProto node_update_type = 14;
     * @return Whether the nodeUpdateType field is set.
     */
    boolean hasNodeUpdateType();
    /**
     * optional .hadoop.yarn.NodeUpdateTypeProto node_update_type = 14;
     * @return The nodeUpdateType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto getNodeUpdateType();

    /**
     * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto>
        getNodeAttributesList();
    /**
     * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getNodeAttributes(int index);
    /**
     * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;
     */
    int getNodeAttributesCount();
    /**
     * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder>
        getNodeAttributesOrBuilderList();
    /**
     * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder getNodeAttributesOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.NodeReportProto}
   */
  public static final class NodeReportProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeReportProto)
      NodeReportProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodeReportProto.newBuilder() to construct.
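    // The no-arg constructor below seeds reference-typed fields with their
    // proto defaults: empty strings, an empty node_labels list, an empty
    // node_attributes list, and the lowest declared numbers for the
    // node_state and node_update_type enums.
    //
    // Illustrative sketch for the repeated node_labels accessors (labels
    // invented; addNodeLabels is the usual generated adder for a repeated
    // string field):
    //
    //   NodeReportProto report = NodeReportProto.newBuilder()
    //       .addNodeLabels("gpu").addNodeLabels("ssd").build();
    //   for (int i = 0; i < report.getNodeLabelsCount(); i++) {
    //     System.out.println(report.getNodeLabels(i));
    //   }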
private NodeReportProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private NodeReportProto() { httpAddress_ = ""; rackName_ = ""; nodeState_ = 1; healthReport_ = ""; nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; nodeUpdateType_ = 0; nodeAttributes_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new NodeReportProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeReportProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeReportProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder.class); } private int bitField0_; public static final int NODEID_FIELD_NUMBER = 1; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_; /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; * @return Whether the nodeId field is set. */ @java.lang.Override public boolean hasNodeId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; * @return The nodeId. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } public static final int HTTPADDRESS_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object httpAddress_ = ""; /** * optional string httpAddress = 2; * @return Whether the httpAddress field is set. */ @java.lang.Override public boolean hasHttpAddress() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string httpAddress = 2; * @return The httpAddress. */ @java.lang.Override public java.lang.String getHttpAddress() { java.lang.Object ref = httpAddress_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { httpAddress_ = s; } return s; } } /** * optional string httpAddress = 2; * @return The bytes for httpAddress. 
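     * <p>Note on this accessor pair: the field is held as a plain Object and
     * converted lazily between ByteString (wire form) and String (decoded
     * form); each getter caches its conversion, so repeated reads are cheap.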
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getHttpAddressBytes() { java.lang.Object ref = httpAddress_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); httpAddress_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int RACKNAME_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object rackName_ = ""; /** * optional string rackName = 3; * @return Whether the rackName field is set. */ @java.lang.Override public boolean hasRackName() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string rackName = 3; * @return The rackName. */ @java.lang.Override public java.lang.String getRackName() { java.lang.Object ref = rackName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { rackName_ = s; } return s; } } /** * optional string rackName = 3; * @return The bytes for rackName. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getRackNameBytes() { java.lang.Object ref = rackName_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); rackName_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int USED_FIELD_NUMBER = 4; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto used_; /** * optional .hadoop.yarn.ResourceProto used = 4; * @return Whether the used field is set. */ @java.lang.Override public boolean hasUsed() { return ((bitField0_ & 0x00000008) != 0); } /** * optional .hadoop.yarn.ResourceProto used = 4; * @return The used. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getUsed() { return used_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : used_; } /** * optional .hadoop.yarn.ResourceProto used = 4; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getUsedOrBuilder() { return used_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : used_; } public static final int CAPABILITY_FIELD_NUMBER = 5; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_; /** * optional .hadoop.yarn.ResourceProto capability = 5; * @return Whether the capability field is set. */ @java.lang.Override public boolean hasCapability() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.ResourceProto capability = 5; * @return The capability. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() { return capability_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } /** * optional .hadoop.yarn.ResourceProto capability = 5; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() { return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } public static final int NUMCONTAINERS_FIELD_NUMBER = 6; private int numContainers_ = 0; /** * optional int32 numContainers = 6; * @return Whether the numContainers field is set. */ @java.lang.Override public boolean hasNumContainers() { return ((bitField0_ & 0x00000020) != 0); } /** * optional int32 numContainers = 6; * @return The numContainers. */ @java.lang.Override public int getNumContainers() { return numContainers_; } public static final int NODE_STATE_FIELD_NUMBER = 7; private int nodeState_ = 1; /** * optional .hadoop.yarn.NodeStateProto node_state = 7; * @return Whether the nodeState field is set. */ @java.lang.Override public boolean hasNodeState() { return ((bitField0_ & 0x00000040) != 0); } /** * optional .hadoop.yarn.NodeStateProto node_state = 7; * @return The nodeState. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto getNodeState() { org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.forNumber(nodeState_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.NS_NEW : result; } public static final int HEALTH_REPORT_FIELD_NUMBER = 8; @SuppressWarnings("serial") private volatile java.lang.Object healthReport_ = ""; /** * optional string health_report = 8; * @return Whether the healthReport field is set. */ @java.lang.Override public boolean hasHealthReport() { return ((bitField0_ & 0x00000080) != 0); } /** * optional string health_report = 8; * @return The healthReport. */ @java.lang.Override public java.lang.String getHealthReport() { java.lang.Object ref = healthReport_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { healthReport_ = s; } return s; } } /** * optional string health_report = 8; * @return The bytes for healthReport. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getHealthReportBytes() { java.lang.Object ref = healthReport_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); healthReport_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int LAST_HEALTH_REPORT_TIME_FIELD_NUMBER = 9; private long lastHealthReportTime_ = 0L; /** * optional int64 last_health_report_time = 9; * @return Whether the lastHealthReportTime field is set. */ @java.lang.Override public boolean hasLastHealthReportTime() { return ((bitField0_ & 0x00000100) != 0); } /** * optional int64 last_health_report_time = 9; * @return The lastHealthReportTime. 
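   * <p>Added note (sketch): this file only fixes the type as int64; treating the
   * value as epoch milliseconds, e.g.
   * {@code new java.util.Date(report.getLastHealthReportTime())}, is an
   * assumption about how YARN populates it, and should be guarded by
   * {@code hasLastHealthReportTime()}.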
*/ @java.lang.Override public long getLastHealthReportTime() { return lastHealthReportTime_; } public static final int NODE_LABELS_FIELD_NUMBER = 10; @SuppressWarnings("serial") private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList nodeLabels_; /** * repeated string node_labels = 10; * @return A list containing the nodeLabels. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getNodeLabelsList() { return nodeLabels_; } /** * repeated string node_labels = 10; * @return The count of nodeLabels. */ public int getNodeLabelsCount() { return nodeLabels_.size(); } /** * repeated string node_labels = 10; * @param index The index of the element to return. * @return The nodeLabels at the given index. */ public java.lang.String getNodeLabels(int index) { return nodeLabels_.get(index); } /** * repeated string node_labels = 10; * @param index The index of the value to return. * @return The bytes of the nodeLabels at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNodeLabelsBytes(int index) { return nodeLabels_.getByteString(index); } public static final int CONTAINERS_UTILIZATION_FIELD_NUMBER = 11; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto containersUtilization_; /** * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11; * @return Whether the containersUtilization field is set. */ @java.lang.Override public boolean hasContainersUtilization() { return ((bitField0_ & 0x00000200) != 0); } /** * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11; * @return The containersUtilization. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getContainersUtilization() { return containersUtilization_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : containersUtilization_; } /** * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getContainersUtilizationOrBuilder() { return containersUtilization_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : containersUtilization_; } public static final int NODE_UTILIZATION_FIELD_NUMBER = 12; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto nodeUtilization_; /** * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12; * @return Whether the nodeUtilization field is set. */ @java.lang.Override public boolean hasNodeUtilization() { return ((bitField0_ & 0x00000400) != 0); } /** * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12; * @return The nodeUtilization. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getNodeUtilization() { return nodeUtilization_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : nodeUtilization_; } /** * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getNodeUtilizationOrBuilder() { return nodeUtilization_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : nodeUtilization_; } public static final int DECOMMISSIONING_TIMEOUT_FIELD_NUMBER = 13; private int decommissioningTimeout_ = 0; /** * optional uint32 decommissioning_timeout = 13; * @return Whether the decommissioningTimeout field is set. */ @java.lang.Override public boolean hasDecommissioningTimeout() { return ((bitField0_ & 0x00000800) != 0); } /** * optional uint32 decommissioning_timeout = 13; * @return The decommissioningTimeout. */ @java.lang.Override public int getDecommissioningTimeout() { return decommissioningTimeout_; } public static final int NODE_UPDATE_TYPE_FIELD_NUMBER = 14; private int nodeUpdateType_ = 0; /** * optional .hadoop.yarn.NodeUpdateTypeProto node_update_type = 14; * @return Whether the nodeUpdateType field is set. */ @java.lang.Override public boolean hasNodeUpdateType() { return ((bitField0_ & 0x00001000) != 0); } /** * optional .hadoop.yarn.NodeUpdateTypeProto node_update_type = 14; * @return The nodeUpdateType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto getNodeUpdateType() { org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto.forNumber(nodeUpdateType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto.NODE_USABLE : result; } public static final int NODE_ATTRIBUTES_FIELD_NUMBER = 15; @SuppressWarnings("serial") private java.util.List nodeAttributes_; /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ @java.lang.Override public java.util.List getNodeAttributesList() { return nodeAttributes_; } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ @java.lang.Override public java.util.List getNodeAttributesOrBuilderList() { return nodeAttributes_; } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ @java.lang.Override public int getNodeAttributesCount() { return nodeAttributes_.size(); } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getNodeAttributes(int index) { return nodeAttributes_.get(index); } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder getNodeAttributesOrBuilder( int index) { return nodeAttributes_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasUsed()) { if (!getUsed().isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasCapability()) { if (!getCapability().isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasContainersUtilization()) { if (!getContainersUtilization().isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasNodeUtilization()) { if (!getNodeUtilization().isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getNodeAttributesCount(); i++) { if (!getNodeAttributes(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void 
writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getNodeId()); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, httpAddress_); } if (((bitField0_ & 0x00000004) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, rackName_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeMessage(4, getUsed()); } if (((bitField0_ & 0x00000010) != 0)) { output.writeMessage(5, getCapability()); } if (((bitField0_ & 0x00000020) != 0)) { output.writeInt32(6, numContainers_); } if (((bitField0_ & 0x00000040) != 0)) { output.writeEnum(7, nodeState_); } if (((bitField0_ & 0x00000080) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 8, healthReport_); } if (((bitField0_ & 0x00000100) != 0)) { output.writeInt64(9, lastHealthReportTime_); } for (int i = 0; i < nodeLabels_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 10, nodeLabels_.getRaw(i)); } if (((bitField0_ & 0x00000200) != 0)) { output.writeMessage(11, getContainersUtilization()); } if (((bitField0_ & 0x00000400) != 0)) { output.writeMessage(12, getNodeUtilization()); } if (((bitField0_ & 0x00000800) != 0)) { output.writeUInt32(13, decommissioningTimeout_); } if (((bitField0_ & 0x00001000) != 0)) { output.writeEnum(14, nodeUpdateType_); } for (int i = 0; i < nodeAttributes_.size(); i++) { output.writeMessage(15, nodeAttributes_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getNodeId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, httpAddress_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, rackName_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(4, getUsed()); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(5, getCapability()); } if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(6, numContainers_); } if (((bitField0_ & 0x00000040) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(7, nodeState_); } if (((bitField0_ & 0x00000080) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(8, healthReport_); } if (((bitField0_ & 0x00000100) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(9, lastHealthReportTime_); } { int dataSize = 0; for (int i = 0; i < nodeLabels_.size(); i++) { dataSize += computeStringSizeNoTag(nodeLabels_.getRaw(i)); } size += dataSize; size += 1 * getNodeLabelsList().size(); } if (((bitField0_ & 0x00000200) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(11, getContainersUtilization()); } if (((bitField0_ & 0x00000400) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(12, 
getNodeUtilization()); } if (((bitField0_ & 0x00000800) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeUInt32Size(13, decommissioningTimeout_); } if (((bitField0_ & 0x00001000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(14, nodeUpdateType_); } for (int i = 0; i < nodeAttributes_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(15, nodeAttributes_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto) obj; if (hasNodeId() != other.hasNodeId()) return false; if (hasNodeId()) { if (!getNodeId() .equals(other.getNodeId())) return false; } if (hasHttpAddress() != other.hasHttpAddress()) return false; if (hasHttpAddress()) { if (!getHttpAddress() .equals(other.getHttpAddress())) return false; } if (hasRackName() != other.hasRackName()) return false; if (hasRackName()) { if (!getRackName() .equals(other.getRackName())) return false; } if (hasUsed() != other.hasUsed()) return false; if (hasUsed()) { if (!getUsed() .equals(other.getUsed())) return false; } if (hasCapability() != other.hasCapability()) return false; if (hasCapability()) { if (!getCapability() .equals(other.getCapability())) return false; } if (hasNumContainers() != other.hasNumContainers()) return false; if (hasNumContainers()) { if (getNumContainers() != other.getNumContainers()) return false; } if (hasNodeState() != other.hasNodeState()) return false; if (hasNodeState()) { if (nodeState_ != other.nodeState_) return false; } if (hasHealthReport() != other.hasHealthReport()) return false; if (hasHealthReport()) { if (!getHealthReport() .equals(other.getHealthReport())) return false; } if (hasLastHealthReportTime() != other.hasLastHealthReportTime()) return false; if (hasLastHealthReportTime()) { if (getLastHealthReportTime() != other.getLastHealthReportTime()) return false; } if (!getNodeLabelsList() .equals(other.getNodeLabelsList())) return false; if (hasContainersUtilization() != other.hasContainersUtilization()) return false; if (hasContainersUtilization()) { if (!getContainersUtilization() .equals(other.getContainersUtilization())) return false; } if (hasNodeUtilization() != other.hasNodeUtilization()) return false; if (hasNodeUtilization()) { if (!getNodeUtilization() .equals(other.getNodeUtilization())) return false; } if (hasDecommissioningTimeout() != other.hasDecommissioningTimeout()) return false; if (hasDecommissioningTimeout()) { if (getDecommissioningTimeout() != other.getDecommissioningTimeout()) return false; } if (hasNodeUpdateType() != other.hasNodeUpdateType()) return false; if (hasNodeUpdateType()) { if (nodeUpdateType_ != other.nodeUpdateType_) return false; } if (!getNodeAttributesList() .equals(other.getNodeAttributesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasNodeId()) { hash = (37 * hash) + NODEID_FIELD_NUMBER; hash = (53 * hash) + 
getNodeId().hashCode(); } if (hasHttpAddress()) { hash = (37 * hash) + HTTPADDRESS_FIELD_NUMBER; hash = (53 * hash) + getHttpAddress().hashCode(); } if (hasRackName()) { hash = (37 * hash) + RACKNAME_FIELD_NUMBER; hash = (53 * hash) + getRackName().hashCode(); } if (hasUsed()) { hash = (37 * hash) + USED_FIELD_NUMBER; hash = (53 * hash) + getUsed().hashCode(); } if (hasCapability()) { hash = (37 * hash) + CAPABILITY_FIELD_NUMBER; hash = (53 * hash) + getCapability().hashCode(); } if (hasNumContainers()) { hash = (37 * hash) + NUMCONTAINERS_FIELD_NUMBER; hash = (53 * hash) + getNumContainers(); } if (hasNodeState()) { hash = (37 * hash) + NODE_STATE_FIELD_NUMBER; hash = (53 * hash) + nodeState_; } if (hasHealthReport()) { hash = (37 * hash) + HEALTH_REPORT_FIELD_NUMBER; hash = (53 * hash) + getHealthReport().hashCode(); } if (hasLastHealthReportTime()) { hash = (37 * hash) + LAST_HEALTH_REPORT_TIME_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getLastHealthReportTime()); } if (getNodeLabelsCount() > 0) { hash = (37 * hash) + NODE_LABELS_FIELD_NUMBER; hash = (53 * hash) + getNodeLabelsList().hashCode(); } if (hasContainersUtilization()) { hash = (37 * hash) + CONTAINERS_UTILIZATION_FIELD_NUMBER; hash = (53 * hash) + getContainersUtilization().hashCode(); } if (hasNodeUtilization()) { hash = (37 * hash) + NODE_UTILIZATION_FIELD_NUMBER; hash = (53 * hash) + getNodeUtilization().hashCode(); } if (hasDecommissioningTimeout()) { hash = (37 * hash) + DECOMMISSIONING_TIMEOUT_FIELD_NUMBER; hash = (53 * hash) + getDecommissioningTimeout(); } if (hasNodeUpdateType()) { hash = (37 * hash) + NODE_UPDATE_TYPE_FIELD_NUMBER; hash = (53 * hash) + nodeUpdateType_; } if (getNodeAttributesCount() > 0) { hash = (37 * hash) + NODE_ATTRIBUTES_FIELD_NUMBER; hash = (53 * hash) + getNodeAttributesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom( byte[] data, 
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.NodeReportProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeReportProto) org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeReportProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeReportProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getNodeIdFieldBuilder(); getUsedFieldBuilder(); getCapabilityFieldBuilder(); getContainersUtilizationFieldBuilder(); getNodeUtilizationFieldBuilder(); getNodeAttributesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; nodeId_ = null; if (nodeIdBuilder_ != null) { nodeIdBuilder_.dispose(); nodeIdBuilder_ = null; } httpAddress_ = ""; rackName_ = ""; used_ = null; if (usedBuilder_ != null) { usedBuilder_.dispose(); usedBuilder_ = null; } capability_ = null; if (capabilityBuilder_ != null) { capabilityBuilder_.dispose(); capabilityBuilder_ = null; } numContainers_ = 0; nodeState_ = 1; healthReport_ = ""; lastHealthReportTime_ = 0L; nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000200); containersUtilization_ = null; if (containersUtilizationBuilder_ != null) { containersUtilizationBuilder_.dispose(); containersUtilizationBuilder_ = null; } nodeUtilization_ = null; if (nodeUtilizationBuilder_ != null) { nodeUtilizationBuilder_.dispose(); nodeUtilizationBuilder_ = null; } decommissioningTimeout_ = 0; nodeUpdateType_ = 0; if (nodeAttributesBuilder_ == null) { nodeAttributes_ = java.util.Collections.emptyList(); } else { nodeAttributes_ = null; nodeAttributesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00004000); return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeReportProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.getDefaultInstance(); } @java.lang.Override public 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto build() { org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto result) { if (((bitField0_ & 0x00000200) != 0)) { nodeLabels_ = nodeLabels_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000200); } result.nodeLabels_ = nodeLabels_; if (nodeAttributesBuilder_ == null) { if (((bitField0_ & 0x00004000) != 0)) { nodeAttributes_ = java.util.Collections.unmodifiableList(nodeAttributes_); bitField0_ = (bitField0_ & ~0x00004000); } result.nodeAttributes_ = nodeAttributes_; } else { result.nodeAttributes_ = nodeAttributesBuilder_.build(); } } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.nodeId_ = nodeIdBuilder_ == null ? nodeId_ : nodeIdBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.httpAddress_ = httpAddress_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.rackName_ = rackName_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.used_ = usedBuilder_ == null ? used_ : usedBuilder_.build(); to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.capability_ = capabilityBuilder_ == null ? capability_ : capabilityBuilder_.build(); to_bitField0_ |= 0x00000010; } if (((from_bitField0_ & 0x00000020) != 0)) { result.numContainers_ = numContainers_; to_bitField0_ |= 0x00000020; } if (((from_bitField0_ & 0x00000040) != 0)) { result.nodeState_ = nodeState_; to_bitField0_ |= 0x00000040; } if (((from_bitField0_ & 0x00000080) != 0)) { result.healthReport_ = healthReport_; to_bitField0_ |= 0x00000080; } if (((from_bitField0_ & 0x00000100) != 0)) { result.lastHealthReportTime_ = lastHealthReportTime_; to_bitField0_ |= 0x00000100; } if (((from_bitField0_ & 0x00000400) != 0)) { result.containersUtilization_ = containersUtilizationBuilder_ == null ? containersUtilization_ : containersUtilizationBuilder_.build(); to_bitField0_ |= 0x00000200; } if (((from_bitField0_ & 0x00000800) != 0)) { result.nodeUtilization_ = nodeUtilizationBuilder_ == null ? 
nodeUtilization_ : nodeUtilizationBuilder_.build(); to_bitField0_ |= 0x00000400; } if (((from_bitField0_ & 0x00001000) != 0)) { result.decommissioningTimeout_ = decommissioningTimeout_; to_bitField0_ |= 0x00000800; } if (((from_bitField0_ & 0x00002000) != 0)) { result.nodeUpdateType_ = nodeUpdateType_; to_bitField0_ |= 0x00001000; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.getDefaultInstance()) return this; if (other.hasNodeId()) { mergeNodeId(other.getNodeId()); } if (other.hasHttpAddress()) { httpAddress_ = other.httpAddress_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasRackName()) { rackName_ = other.rackName_; bitField0_ |= 0x00000004; onChanged(); } if (other.hasUsed()) { mergeUsed(other.getUsed()); } if (other.hasCapability()) { mergeCapability(other.getCapability()); } if (other.hasNumContainers()) { setNumContainers(other.getNumContainers()); } if (other.hasNodeState()) { setNodeState(other.getNodeState()); } if (other.hasHealthReport()) { healthReport_ = other.healthReport_; bitField0_ |= 0x00000080; onChanged(); } if (other.hasLastHealthReportTime()) { setLastHealthReportTime(other.getLastHealthReportTime()); } if (!other.nodeLabels_.isEmpty()) { if (nodeLabels_.isEmpty()) { nodeLabels_ = other.nodeLabels_; bitField0_ = (bitField0_ & ~0x00000200); } else { ensureNodeLabelsIsMutable(); nodeLabels_.addAll(other.nodeLabels_); } onChanged(); } if (other.hasContainersUtilization()) { mergeContainersUtilization(other.getContainersUtilization()); } if (other.hasNodeUtilization()) { mergeNodeUtilization(other.getNodeUtilization()); } if (other.hasDecommissioningTimeout()) { setDecommissioningTimeout(other.getDecommissioningTimeout()); } if (other.hasNodeUpdateType()) { setNodeUpdateType(other.getNodeUpdateType()); } if (nodeAttributesBuilder_ == null) { if (!other.nodeAttributes_.isEmpty()) { if (nodeAttributes_.isEmpty()) { nodeAttributes_ = other.nodeAttributes_; bitField0_ = (bitField0_ & ~0x00004000); } else { ensureNodeAttributesIsMutable(); 
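        // Added note: merging a repeated field appends the other message's
        // elements to the existing list rather than replacing them, following
        // standard protobuf mergeFrom semantics for repeated fields.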
nodeAttributes_.addAll(other.nodeAttributes_); } onChanged(); } } else { if (!other.nodeAttributes_.isEmpty()) { if (nodeAttributesBuilder_.isEmpty()) { nodeAttributesBuilder_.dispose(); nodeAttributesBuilder_ = null; nodeAttributes_ = other.nodeAttributes_; bitField0_ = (bitField0_ & ~0x00004000); nodeAttributesBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getNodeAttributesFieldBuilder() : null; } else { nodeAttributesBuilder_.addAllMessages(other.nodeAttributes_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasUsed()) { if (!getUsed().isInitialized()) { return false; } } if (hasCapability()) { if (!getCapability().isInitialized()) { return false; } } if (hasContainersUtilization()) { if (!getContainersUtilization().isInitialized()) { return false; } } if (hasNodeUtilization()) { if (!getNodeUtilization().isInitialized()) { return false; } } for (int i = 0; i < getNodeAttributesCount(); i++) { if (!getNodeAttributes(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getNodeIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { httpAddress_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { rackName_ = input.readBytes(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { input.readMessage( getUsedFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000008; break; } // case 34 case 42: { input.readMessage( getCapabilityFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000010; break; } // case 42 case 48: { numContainers_ = input.readInt32(); bitField0_ |= 0x00000020; break; } // case 48 case 56: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(7, tmpRaw); } else { nodeState_ = tmpRaw; bitField0_ |= 0x00000040; } break; } // case 56 case 66: { healthReport_ = input.readBytes(); bitField0_ |= 0x00000080; break; } // case 66 case 72: { lastHealthReportTime_ = input.readInt64(); bitField0_ |= 0x00000100; break; } // case 72 case 82: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); ensureNodeLabelsIsMutable(); nodeLabels_.add(bs); break; } // case 82 case 90: { input.readMessage( getContainersUtilizationFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000400; break; } // case 90 case 98: { input.readMessage( getNodeUtilizationFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000800; break; } // case 98 case 104: { decommissioningTimeout_ = input.readUInt32(); bitField0_ |= 0x00001000; break; } // case 104 case 112: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(14, tmpRaw); } else { 
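              // Added note: only enum numbers recognized by
              // NodeUpdateTypeProto.forNumber reach this branch; unrecognized
              // numbers were already preserved as unknown varint fields above.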
nodeUpdateType_ = tmpRaw; bitField0_ |= 0x00002000; } break; } // case 112 case 122: { org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.PARSER, extensionRegistry); if (nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); nodeAttributes_.add(m); } else { nodeAttributesBuilder_.addMessage(m); } break; } // case 122 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_; /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; * @return Whether the nodeId field is set. */ public boolean hasNodeId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; * @return The nodeId. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() { if (nodeIdBuilder_ == null) { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } else { return nodeIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public Builder setNodeId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) { if (nodeIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } nodeId_ = value; } else { nodeIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public Builder setNodeId( org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) { if (nodeIdBuilder_ == null) { nodeId_ = builderForValue.build(); } else { nodeIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public Builder mergeNodeId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) { if (nodeIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && nodeId_ != null && nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) { getNodeIdBuilder().mergeFrom(value); } else { nodeId_ = value; } } else { nodeIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public Builder clearNodeId() { bitField0_ = (bitField0_ & ~0x00000001); nodeId_ = null; if (nodeIdBuilder_ != null) { nodeIdBuilder_.dispose(); nodeIdBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getNodeIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() { if (nodeIdBuilder_ != null) { return nodeIdBuilder_.getMessageOrBuilder(); } else { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> getNodeIdFieldBuilder() { if (nodeIdBuilder_ == null) { nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>( getNodeId(), getParentForChildren(), isClean()); nodeId_ = null; } return nodeIdBuilder_; } private java.lang.Object httpAddress_ = ""; /** * optional string httpAddress = 2; * @return Whether the httpAddress field is set. */ public boolean hasHttpAddress() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string httpAddress = 2; * @return The httpAddress. */ public java.lang.String getHttpAddress() { java.lang.Object ref = httpAddress_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { httpAddress_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string httpAddress = 2; * @return The bytes for httpAddress. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getHttpAddressBytes() { java.lang.Object ref = httpAddress_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); httpAddress_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string httpAddress = 2; * @param value The httpAddress to set. * @return This builder for chaining. */ public Builder setHttpAddress( java.lang.String value) { if (value == null) { throw new NullPointerException(); } httpAddress_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional string httpAddress = 2; * @return This builder for chaining. */ public Builder clearHttpAddress() { httpAddress_ = getDefaultInstance().getHttpAddress(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * optional string httpAddress = 2; * @param value The bytes for httpAddress to set. * @return This builder for chaining. */ public Builder setHttpAddressBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } httpAddress_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object rackName_ = ""; /** * optional string rackName = 3; * @return Whether the rackName field is set. */ public boolean hasRackName() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string rackName = 3; * @return The rackName. 
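   * <p>Illustrative round trip (made-up value): after
   * {@code builder.setRackName("/default-rack")} this accessor returns
   * {@code "/default-rack"}; the value is an example only, though it matches the
   * common Hadoop default rack name.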
*/ public java.lang.String getRackName() { java.lang.Object ref = rackName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { rackName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string rackName = 3; * @return The bytes for rackName. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getRackNameBytes() { java.lang.Object ref = rackName_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); rackName_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string rackName = 3; * @param value The rackName to set. * @return This builder for chaining. */ public Builder setRackName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } rackName_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional string rackName = 3; * @return This builder for chaining. */ public Builder clearRackName() { rackName_ = getDefaultInstance().getRackName(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * optional string rackName = 3; * @param value The bytes for rackName to set. * @return This builder for chaining. */ public Builder setRackNameBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } rackName_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto used_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> usedBuilder_; /** * optional .hadoop.yarn.ResourceProto used = 4; * @return Whether the used field is set. */ public boolean hasUsed() { return ((bitField0_ & 0x00000008) != 0); } /** * optional .hadoop.yarn.ResourceProto used = 4; * @return The used. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getUsed() { if (usedBuilder_ == null) { return used_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : used_; } else { return usedBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceProto used = 4; */ public Builder setUsed(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (usedBuilder_ == null) { if (value == null) { throw new NullPointerException(); } used_ = value; } else { usedBuilder_.setMessage(value); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto used = 4; */ public Builder setUsed( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) { if (usedBuilder_ == null) { used_ = builderForValue.build(); } else { usedBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto used = 4; */ public Builder mergeUsed(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (usedBuilder_ == null) { if (((bitField0_ & 0x00000008) != 0) && used_ != null && used_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) { getUsedBuilder().mergeFrom(value); } else { used_ = value; } } else { usedBuilder_.mergeFrom(value); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto used = 4; */ public Builder clearUsed() { bitField0_ = (bitField0_ & ~0x00000008); used_ = null; if (usedBuilder_ != null) { usedBuilder_.dispose(); usedBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto used = 4; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getUsedBuilder() { bitField0_ |= 0x00000008; onChanged(); return getUsedFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceProto used = 4; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getUsedOrBuilder() { if (usedBuilder_ != null) { return usedBuilder_.getMessageOrBuilder(); } else { return used_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : used_; } } /** * optional .hadoop.yarn.ResourceProto used = 4; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getUsedFieldBuilder() { if (usedBuilder_ == null) { usedBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>( getUsed(), getParentForChildren(), isClean()); used_ = null; } return usedBuilder_; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> capabilityBuilder_; /** * optional .hadoop.yarn.ResourceProto capability = 5; * @return Whether the capability field is set. 
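   * <p>Illustrative guard (sketch): since the field is optional, callers
   * typically test it first, e.g.
   * {@code if (builder.hasCapability()) { long mem = builder.getCapability().getMemory(); }};
   * here {@code getMemory()} is assumed from ResourceProto elsewhere in this
   * file and is not shown in this section.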
*/ public boolean hasCapability() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.ResourceProto capability = 5; * @return The capability. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() { if (capabilityBuilder_ == null) { return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } else { return capabilityBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceProto capability = 5; */ public Builder setCapability(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (capabilityBuilder_ == null) { if (value == null) { throw new NullPointerException(); } capability_ = value; } else { capabilityBuilder_.setMessage(value); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto capability = 5; */ public Builder setCapability( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) { if (capabilityBuilder_ == null) { capability_ = builderForValue.build(); } else { capabilityBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto capability = 5; */ public Builder mergeCapability(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (capabilityBuilder_ == null) { if (((bitField0_ & 0x00000010) != 0) && capability_ != null && capability_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) { getCapabilityBuilder().mergeFrom(value); } else { capability_ = value; } } else { capabilityBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto capability = 5; */ public Builder clearCapability() { bitField0_ = (bitField0_ & ~0x00000010); capability_ = null; if (capabilityBuilder_ != null) { capabilityBuilder_.dispose(); capabilityBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto capability = 5; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getCapabilityBuilder() { bitField0_ |= 0x00000010; onChanged(); return getCapabilityFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceProto capability = 5; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() { if (capabilityBuilder_ != null) { return capabilityBuilder_.getMessageOrBuilder(); } else { return capability_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } } /** * optional .hadoop.yarn.ResourceProto capability = 5; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getCapabilityFieldBuilder() { if (capabilityBuilder_ == null) { capabilityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>( getCapability(), getParentForChildren(), isClean()); capability_ = null; } return capabilityBuilder_; } private int numContainers_ ; /** * optional int32 numContainers = 6; * @return Whether the numContainers field is set. */ @java.lang.Override public boolean hasNumContainers() { return ((bitField0_ & 0x00000020) != 0); } /** * optional int32 numContainers = 6; * @return The numContainers. */ @java.lang.Override public int getNumContainers() { return numContainers_; } /** * optional int32 numContainers = 6; * @param value The numContainers to set. * @return This builder for chaining. */ public Builder setNumContainers(int value) { numContainers_ = value; bitField0_ |= 0x00000020; onChanged(); return this; } /** * optional int32 numContainers = 6; * @return This builder for chaining. */ public Builder clearNumContainers() { bitField0_ = (bitField0_ & ~0x00000020); numContainers_ = 0; onChanged(); return this; } private int nodeState_ = 1; /** * optional .hadoop.yarn.NodeStateProto node_state = 7; * @return Whether the nodeState field is set. */ @java.lang.Override public boolean hasNodeState() { return ((bitField0_ & 0x00000040) != 0); } /** * optional .hadoop.yarn.NodeStateProto node_state = 7; * @return The nodeState. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto getNodeState() { org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.forNumber(nodeState_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.NS_NEW : result; } /** * optional .hadoop.yarn.NodeStateProto node_state = 7; * @param value The nodeState to set. * @return This builder for chaining. */ public Builder setNodeState(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000040; nodeState_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.NodeStateProto node_state = 7; * @return This builder for chaining. */ public Builder clearNodeState() { bitField0_ = (bitField0_ & ~0x00000040); nodeState_ = 1; onChanged(); return this; } private java.lang.Object healthReport_ = ""; /** * optional string health_report = 8; * @return Whether the healthReport field is set. */ public boolean hasHealthReport() { return ((bitField0_ & 0x00000080) != 0); } /** * optional string health_report = 8; * @return The healthReport. 
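   * <p>Illustrative round trip (made-up report text):
   * {@code builder.setHealthReport("1/1 local-dirs are ok")} makes this accessor
   * return that string, and {@code clearHealthReport()} reverts it to the
   * default instance's empty value, as the methods below show.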
*/ public java.lang.String getHealthReport() { java.lang.Object ref = healthReport_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { healthReport_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string health_report = 8; * @return The bytes for healthReport. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getHealthReportBytes() { java.lang.Object ref = healthReport_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); healthReport_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string health_report = 8; * @param value The healthReport to set. * @return This builder for chaining. */ public Builder setHealthReport( java.lang.String value) { if (value == null) { throw new NullPointerException(); } healthReport_ = value; bitField0_ |= 0x00000080; onChanged(); return this; } /** * optional string health_report = 8; * @return This builder for chaining. */ public Builder clearHealthReport() { healthReport_ = getDefaultInstance().getHealthReport(); bitField0_ = (bitField0_ & ~0x00000080); onChanged(); return this; } /** * optional string health_report = 8; * @param value The bytes for healthReport to set. * @return This builder for chaining. */ public Builder setHealthReportBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } healthReport_ = value; bitField0_ |= 0x00000080; onChanged(); return this; } private long lastHealthReportTime_ ; /** * optional int64 last_health_report_time = 9; * @return Whether the lastHealthReportTime field is set. */ @java.lang.Override public boolean hasLastHealthReportTime() { return ((bitField0_ & 0x00000100) != 0); } /** * optional int64 last_health_report_time = 9; * @return The lastHealthReportTime. */ @java.lang.Override public long getLastHealthReportTime() { return lastHealthReportTime_; } /** * optional int64 last_health_report_time = 9; * @param value The lastHealthReportTime to set. * @return This builder for chaining. */ public Builder setLastHealthReportTime(long value) { lastHealthReportTime_ = value; bitField0_ |= 0x00000100; onChanged(); return this; } /** * optional int64 last_health_report_time = 9; * @return This builder for chaining. */ public Builder clearLastHealthReportTime() { bitField0_ = (bitField0_ & ~0x00000100); lastHealthReportTime_ = 0L; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; private void ensureNodeLabelsIsMutable() { if (!((bitField0_ & 0x00000200) != 0)) { nodeLabels_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(nodeLabels_); bitField0_ |= 0x00000200; } } /** * repeated string node_labels = 10; * @return A list containing the nodeLabels. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getNodeLabelsList() { return nodeLabels_.getUnmodifiableView(); } /** * repeated string node_labels = 10; * @return The count of nodeLabels. 
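 * <p>Editorial usage sketch for the repeated {@code node_labels} field: the
 * builder exposes add/get/count accessors, so labels can be appended and then
 * iterated by index. The label values are illustrative only.
 * <pre>{@code
 * YarnProtos.NodeReportProto.Builder b = YarnProtos.NodeReportProto.newBuilder()
 *     .addNodeLabels("gpu")
 *     .addNodeLabels("ssd");
 * for (int i = 0; i < b.getNodeLabelsCount(); i++) {
 *   System.out.println(b.getNodeLabels(i));
 * }
 * }</pre>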
*/ public int getNodeLabelsCount() { return nodeLabels_.size(); } /** * repeated string node_labels = 10; * @param index The index of the element to return. * @return The nodeLabels at the given index. */ public java.lang.String getNodeLabels(int index) { return nodeLabels_.get(index); } /** * repeated string node_labels = 10; * @param index The index of the value to return. * @return The bytes of the nodeLabels at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNodeLabelsBytes(int index) { return nodeLabels_.getByteString(index); } /** * repeated string node_labels = 10; * @param index The index to set the value at. * @param value The nodeLabels to set. * @return This builder for chaining. */ public Builder setNodeLabels( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureNodeLabelsIsMutable(); nodeLabels_.set(index, value); onChanged(); return this; } /** * repeated string node_labels = 10; * @param value The nodeLabels to add. * @return This builder for chaining. */ public Builder addNodeLabels( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureNodeLabelsIsMutable(); nodeLabels_.add(value); onChanged(); return this; } /** * repeated string node_labels = 10; * @param values The nodeLabels to add. * @return This builder for chaining. */ public Builder addAllNodeLabels( java.lang.Iterable values) { ensureNodeLabelsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, nodeLabels_); onChanged(); return this; } /** * repeated string node_labels = 10; * @return This builder for chaining. */ public Builder clearNodeLabels() { nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000200); onChanged(); return this; } /** * repeated string node_labels = 10; * @param value The bytes of the nodeLabels to add. * @return This builder for chaining. */ public Builder addNodeLabelsBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureNodeLabelsIsMutable(); nodeLabels_.add(value); onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto containersUtilization_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder> containersUtilizationBuilder_; /** * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11; * @return Whether the containersUtilization field is set. */ public boolean hasContainersUtilization() { return ((bitField0_ & 0x00000400) != 0); } /** * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11; * @return The containersUtilization. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getContainersUtilization() { if (containersUtilizationBuilder_ == null) { return containersUtilization_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : containersUtilization_; } else { return containersUtilizationBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11; */ public Builder setContainersUtilization(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto value) { if (containersUtilizationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } containersUtilization_ = value; } else { containersUtilizationBuilder_.setMessage(value); } bitField0_ |= 0x00000400; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11; */ public Builder setContainersUtilization( org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder builderForValue) { if (containersUtilizationBuilder_ == null) { containersUtilization_ = builderForValue.build(); } else { containersUtilizationBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000400; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11; */ public Builder mergeContainersUtilization(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto value) { if (containersUtilizationBuilder_ == null) { if (((bitField0_ & 0x00000400) != 0) && containersUtilization_ != null && containersUtilization_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance()) { getContainersUtilizationBuilder().mergeFrom(value); } else { containersUtilization_ = value; } } else { containersUtilizationBuilder_.mergeFrom(value); } bitField0_ |= 0x00000400; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11; */ public Builder clearContainersUtilization() { bitField0_ = (bitField0_ & ~0x00000400); containersUtilization_ = null; if (containersUtilizationBuilder_ != null) { containersUtilizationBuilder_.dispose(); containersUtilizationBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder getContainersUtilizationBuilder() { bitField0_ |= 0x00000400; onChanged(); return getContainersUtilizationFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getContainersUtilizationOrBuilder() { if (containersUtilizationBuilder_ != null) { return containersUtilizationBuilder_.getMessageOrBuilder(); } else { return containersUtilization_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : containersUtilization_; } } /** * optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder> getContainersUtilizationFieldBuilder() { if (containersUtilizationBuilder_ == null) { containersUtilizationBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder>( getContainersUtilization(), getParentForChildren(), isClean()); containersUtilization_ = null; } return containersUtilizationBuilder_; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto nodeUtilization_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder> nodeUtilizationBuilder_; /** * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12; * @return Whether the nodeUtilization field is set. */ public boolean hasNodeUtilization() { return ((bitField0_ & 0x00000800) != 0); } /** * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12; * @return The nodeUtilization. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getNodeUtilization() { if (nodeUtilizationBuilder_ == null) { return nodeUtilization_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : nodeUtilization_; } else { return nodeUtilizationBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12; */ public Builder setNodeUtilization(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto value) { if (nodeUtilizationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } nodeUtilization_ = value; } else { nodeUtilizationBuilder_.setMessage(value); } bitField0_ |= 0x00000800; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12; */ public Builder setNodeUtilization( org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder builderForValue) { if (nodeUtilizationBuilder_ == null) { nodeUtilization_ = builderForValue.build(); } else { nodeUtilizationBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000800; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12; */ public Builder mergeNodeUtilization(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto value) { if (nodeUtilizationBuilder_ == null) { if (((bitField0_ & 0x00000800) != 0) && nodeUtilization_ != null && nodeUtilization_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance()) { getNodeUtilizationBuilder().mergeFrom(value); } else { nodeUtilization_ = value; } } else { nodeUtilizationBuilder_.mergeFrom(value); } bitField0_ |= 0x00000800; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12; */ public Builder clearNodeUtilization() { bitField0_ = (bitField0_ & ~0x00000800); nodeUtilization_ = null; if (nodeUtilizationBuilder_ != null) { nodeUtilizationBuilder_.dispose(); nodeUtilizationBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder getNodeUtilizationBuilder() { bitField0_ |= 0x00000800; onChanged(); return getNodeUtilizationFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getNodeUtilizationOrBuilder() { if (nodeUtilizationBuilder_ != null) { return nodeUtilizationBuilder_.getMessageOrBuilder(); } else { return nodeUtilization_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : nodeUtilization_; } } /** * optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder> getNodeUtilizationFieldBuilder() { if (nodeUtilizationBuilder_ == null) { nodeUtilizationBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder>( getNodeUtilization(), getParentForChildren(), isClean()); nodeUtilization_ = null; } return nodeUtilizationBuilder_; } private int decommissioningTimeout_ ; /** * optional uint32 decommissioning_timeout = 13; * @return Whether the decommissioningTimeout field is set. */ @java.lang.Override public boolean hasDecommissioningTimeout() { return ((bitField0_ & 0x00001000) != 0); } /** * optional uint32 decommissioning_timeout = 13; * @return The decommissioningTimeout. */ @java.lang.Override public int getDecommissioningTimeout() { return decommissioningTimeout_; } /** * optional uint32 decommissioning_timeout = 13; * @param value The decommissioningTimeout to set. * @return This builder for chaining. */ public Builder setDecommissioningTimeout(int value) { decommissioningTimeout_ = value; bitField0_ |= 0x00001000; onChanged(); return this; } /** * optional uint32 decommissioning_timeout = 13; * @return This builder for chaining. */ public Builder clearDecommissioningTimeout() { bitField0_ = (bitField0_ & ~0x00001000); decommissioningTimeout_ = 0; onChanged(); return this; } private int nodeUpdateType_ = 0; /** * optional .hadoop.yarn.NodeUpdateTypeProto node_update_type = 14; * @return Whether the nodeUpdateType field is set. */ @java.lang.Override public boolean hasNodeUpdateType() { return ((bitField0_ & 0x00002000) != 0); } /** * optional .hadoop.yarn.NodeUpdateTypeProto node_update_type = 14; * @return The nodeUpdateType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto getNodeUpdateType() { org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto.forNumber(nodeUpdateType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto.NODE_USABLE : result; } /** * optional .hadoop.yarn.NodeUpdateTypeProto node_update_type = 14; * @param value The nodeUpdateType to set. * @return This builder for chaining. */ public Builder setNodeUpdateType(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00002000; nodeUpdateType_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.NodeUpdateTypeProto node_update_type = 14; * @return This builder for chaining. 
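 * <p>Editorial sketch: clearing the field resets it to unset, so the getter
 * falls back to the default {@code NODE_USABLE} (numeric value 0), as the
 * generated getter above shows. {@code NODE_DECOMMISSIONING} is assumed to be
 * one of the {@code NodeUpdateTypeProto} constants defined elsewhere in this
 * file.
 * <pre>{@code
 * YarnProtos.NodeReportProto.Builder b = YarnProtos.NodeReportProto.newBuilder()
 *     .setNodeUpdateType(YarnProtos.NodeUpdateTypeProto.NODE_DECOMMISSIONING);
 * b.clearNodeUpdateType();
 * assert !b.hasNodeUpdateType();
 * assert b.getNodeUpdateType() == YarnProtos.NodeUpdateTypeProto.NODE_USABLE;
 * }</pre>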
*/ public Builder clearNodeUpdateType() { bitField0_ = (bitField0_ & ~0x00002000); nodeUpdateType_ = 0; onChanged(); return this; } private java.util.List nodeAttributes_ = java.util.Collections.emptyList(); private void ensureNodeAttributesIsMutable() { if (!((bitField0_ & 0x00004000) != 0)) { nodeAttributes_ = new java.util.ArrayList(nodeAttributes_); bitField0_ |= 0x00004000; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder> nodeAttributesBuilder_; /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public java.util.List getNodeAttributesList() { if (nodeAttributesBuilder_ == null) { return java.util.Collections.unmodifiableList(nodeAttributes_); } else { return nodeAttributesBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public int getNodeAttributesCount() { if (nodeAttributesBuilder_ == null) { return nodeAttributes_.size(); } else { return nodeAttributesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getNodeAttributes(int index) { if (nodeAttributesBuilder_ == null) { return nodeAttributes_.get(index); } else { return nodeAttributesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public Builder setNodeAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto value) { if (nodeAttributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeAttributesIsMutable(); nodeAttributes_.set(index, value); onChanged(); } else { nodeAttributesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public Builder setNodeAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder builderForValue) { if (nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); nodeAttributes_.set(index, builderForValue.build()); onChanged(); } else { nodeAttributesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public Builder addNodeAttributes(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto value) { if (nodeAttributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeAttributesIsMutable(); nodeAttributes_.add(value); onChanged(); } else { nodeAttributesBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public Builder addNodeAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto value) { if (nodeAttributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeAttributesIsMutable(); nodeAttributes_.add(index, value); onChanged(); } else { nodeAttributesBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public Builder addNodeAttributes( org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder builderForValue) { if (nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); 
nodeAttributes_.add(builderForValue.build()); onChanged(); } else { nodeAttributesBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public Builder addNodeAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder builderForValue) { if (nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); nodeAttributes_.add(index, builderForValue.build()); onChanged(); } else { nodeAttributesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public Builder addAllNodeAttributes( java.lang.Iterable values) { if (nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, nodeAttributes_); onChanged(); } else { nodeAttributesBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public Builder clearNodeAttributes() { if (nodeAttributesBuilder_ == null) { nodeAttributes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00004000); onChanged(); } else { nodeAttributesBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public Builder removeNodeAttributes(int index) { if (nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); nodeAttributes_.remove(index); onChanged(); } else { nodeAttributesBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder getNodeAttributesBuilder( int index) { return getNodeAttributesFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder getNodeAttributesOrBuilder( int index) { if (nodeAttributesBuilder_ == null) { return nodeAttributes_.get(index); } else { return nodeAttributesBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public java.util.List getNodeAttributesOrBuilderList() { if (nodeAttributesBuilder_ != null) { return nodeAttributesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(nodeAttributes_); } } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder addNodeAttributesBuilder() { return getNodeAttributesFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder addNodeAttributesBuilder( int index) { return getNodeAttributesFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15; */ public java.util.List getNodeAttributesBuilderList() { return getNodeAttributesFieldBuilder().getBuilderList(); } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto, 
org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder> getNodeAttributesFieldBuilder() { if (nodeAttributesBuilder_ == null) { nodeAttributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder>( nodeAttributes_, ((bitField0_ & 0x00004000) != 0), getParentForChildren(), isClean()); nodeAttributes_ = null; } return nodeAttributesBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeReportProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeReportProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public NodeReportProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface NodeIdToLabelsProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeIdToLabelsProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; * @return Whether the nodeId field is set. */ boolean hasNodeId(); /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; * @return The nodeId. 
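 * <p>Editorial sketch of the generated has/get pattern for this optional
 * message field; it works against either a built message or a builder, since
 * both implement this interface. The {@code getHost()} accessor is assumed
 * from the {@code NodeIdProto} definition.
 * <pre>{@code
 * static void printHost(YarnProtos.NodeIdToLabelsProtoOrBuilder m) {
 *   if (m.hasNodeId()) {
 *     System.out.println(m.getNodeId().getHost());
 *   }
 * }
 * }</pre>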
*/ org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId(); /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder(); /** * repeated string nodeLabels = 2; * @return A list containing the nodeLabels. */ java.util.List getNodeLabelsList(); /** * repeated string nodeLabels = 2; * @return The count of nodeLabels. */ int getNodeLabelsCount(); /** * repeated string nodeLabels = 2; * @param index The index of the element to return. * @return The nodeLabels at the given index. */ java.lang.String getNodeLabels(int index); /** * repeated string nodeLabels = 2; * @param index The index of the value to return. * @return The bytes of the nodeLabels at the given index. */ org.apache.hadoop.thirdparty.protobuf.ByteString getNodeLabelsBytes(int index); } /** * Protobuf type {@code hadoop.yarn.NodeIdToLabelsProto} */ public static final class NodeIdToLabelsProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeIdToLabelsProto) NodeIdToLabelsProtoOrBuilder { private static final long serialVersionUID = 0L; // Use NodeIdToLabelsProto.newBuilder() to construct. private NodeIdToLabelsProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private NodeIdToLabelsProto() { nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new NodeIdToLabelsProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdToLabelsProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdToLabelsProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder.class); } private int bitField0_; public static final int NODEID_FIELD_NUMBER = 1; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_; /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; * @return Whether the nodeId field is set. */ @java.lang.Override public boolean hasNodeId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; * @return The nodeId. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() { return nodeId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } public static final int NODELABELS_FIELD_NUMBER = 2; @SuppressWarnings("serial") private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList nodeLabels_; /** * repeated string nodeLabels = 2; * @return A list containing the nodeLabels. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getNodeLabelsList() { return nodeLabels_; } /** * repeated string nodeLabels = 2; * @return The count of nodeLabels. */ public int getNodeLabelsCount() { return nodeLabels_.size(); } /** * repeated string nodeLabels = 2; * @param index The index of the element to return. * @return The nodeLabels at the given index. */ public java.lang.String getNodeLabels(int index) { return nodeLabels_.get(index); } /** * repeated string nodeLabels = 2; * @param index The index of the value to return. * @return The bytes of the nodeLabels at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNodeLabelsBytes(int index) { return nodeLabels_.getByteString(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getNodeId()); } for (int i = 0; i < nodeLabels_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, nodeLabels_.getRaw(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getNodeId()); } { int dataSize = 0; for (int i = 0; i < nodeLabels_.size(); i++) { dataSize += computeStringSizeNoTag(nodeLabels_.getRaw(i)); } size += dataSize; size += 1 * getNodeLabelsList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto) obj; if (hasNodeId() != other.hasNodeId()) return false; if (hasNodeId()) { if (!getNodeId() .equals(other.getNodeId())) return false; } if (!getNodeLabelsList() .equals(other.getNodeLabelsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasNodeId()) { hash = (37 * hash) + NODEID_FIELD_NUMBER; hash = (53 * hash) + getNodeId().hashCode(); } if (getNodeLabelsCount() > 0) { hash = (37 * hash) + NODELABELS_FIELD_NUMBER; hash = (53 * hash) + getNodeLabelsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom( 
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.NodeIdToLabelsProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeIdToLabelsProto) org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdToLabelsProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdToLabelsProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getNodeIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; nodeId_ = null; if (nodeIdBuilder_ != null) { nodeIdBuilder_.dispose(); nodeIdBuilder_ = null; } nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdToLabelsProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto build() { org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); 
} return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto result) { if (((bitField0_ & 0x00000002) != 0)) { nodeLabels_ = nodeLabels_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000002); } result.nodeLabels_ = nodeLabels_; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.nodeId_ = nodeIdBuilder_ == null ? nodeId_ : nodeIdBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.getDefaultInstance()) return this; if (other.hasNodeId()) { mergeNodeId(other.getNodeId()); } if (!other.nodeLabels_.isEmpty()) { if (nodeLabels_.isEmpty()) { nodeLabels_ = other.nodeLabels_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureNodeLabelsIsMutable(); nodeLabels_.addAll(other.nodeLabels_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( 
getNodeIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); ensureNodeLabelsIsMutable(); nodeLabels_.add(bs); break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_; /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; * @return Whether the nodeId field is set. */ public boolean hasNodeId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; * @return The nodeId. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() { if (nodeIdBuilder_ == null) { return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } else { return nodeIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public Builder setNodeId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) { if (nodeIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } nodeId_ = value; } else { nodeIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public Builder setNodeId( org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) { if (nodeIdBuilder_ == null) { nodeId_ = builderForValue.build(); } else { nodeIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public Builder mergeNodeId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) { if (nodeIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && nodeId_ != null && nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) { getNodeIdBuilder().mergeFrom(value); } else { nodeId_ = value; } } else { nodeIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public Builder clearNodeId() { bitField0_ = (bitField0_ & ~0x00000001); nodeId_ = null; if (nodeIdBuilder_ != null) { nodeIdBuilder_.dispose(); nodeIdBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getNodeIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() { if (nodeIdBuilder_ != null) { return nodeIdBuilder_.getMessageOrBuilder(); } else { return nodeId_ == 
null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_; } } /** * optional .hadoop.yarn.NodeIdProto nodeId = 1; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> getNodeIdFieldBuilder() { if (nodeIdBuilder_ == null) { nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>( getNodeId(), getParentForChildren(), isClean()); nodeId_ = null; } return nodeIdBuilder_; } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; private void ensureNodeLabelsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { nodeLabels_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(nodeLabels_); bitField0_ |= 0x00000002; } } /** * repeated string nodeLabels = 2; * @return A list containing the nodeLabels. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getNodeLabelsList() { return nodeLabels_.getUnmodifiableView(); } /** * repeated string nodeLabels = 2; * @return The count of nodeLabels. */ public int getNodeLabelsCount() { return nodeLabels_.size(); } /** * repeated string nodeLabels = 2; * @param index The index of the element to return. * @return The nodeLabels at the given index. */ public java.lang.String getNodeLabels(int index) { return nodeLabels_.get(index); } /** * repeated string nodeLabels = 2; * @param index The index of the value to return. * @return The bytes of the nodeLabels at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNodeLabelsBytes(int index) { return nodeLabels_.getByteString(index); } /** * repeated string nodeLabels = 2; * @param index The index to set the value at. * @param value The nodeLabels to set. * @return This builder for chaining. */ public Builder setNodeLabels( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureNodeLabelsIsMutable(); nodeLabels_.set(index, value); onChanged(); return this; } /** * repeated string nodeLabels = 2; * @param value The nodeLabels to add. * @return This builder for chaining. */ public Builder addNodeLabels( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureNodeLabelsIsMutable(); nodeLabels_.add(value); onChanged(); return this; } /** * repeated string nodeLabels = 2; * @param values The nodeLabels to add. * @return This builder for chaining. */ public Builder addAllNodeLabels( java.lang.Iterable values) { ensureNodeLabelsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, nodeLabels_); onChanged(); return this; } /** * repeated string nodeLabels = 2; * @return This builder for chaining. */ public Builder clearNodeLabels() { nodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * repeated string nodeLabels = 2; * @param value The bytes of the nodeLabels to add. * @return This builder for chaining. 
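 * <p>Editorial sketch: the bytes variant accepts a raw {@code ByteString},
 * which the string variant would produce via UTF-8 encoding anyway, so the
 * two forms below append the same label.
 * <pre>{@code
 * YarnProtos.NodeIdToLabelsProto.Builder b =
 *     YarnProtos.NodeIdToLabelsProto.newBuilder();
 * b.addNodeLabelsBytes(
 *     org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8("gpu"));
 * // Same effect for valid UTF-8 input:
 * // b.addNodeLabels("gpu");
 * }</pre>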
*/ public Builder addNodeLabelsBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureNodeLabelsIsMutable(); nodeLabels_.add(value); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeIdToLabelsProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeIdToLabelsProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public NodeIdToLabelsProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface LabelsToNodeIdsProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.LabelsToNodeIdsProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional string nodeLabels = 1; * @return Whether the nodeLabels field is set. */ boolean hasNodeLabels(); /** * optional string nodeLabels = 1; * @return The nodeLabels. */ java.lang.String getNodeLabels(); /** * optional string nodeLabels = 1; * @return The bytes for nodeLabels. 
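 * <p>Editorial sketch: copying the raw label between messages through the
 * bytes accessors avoids a UTF-8 decode/encode round trip. The
 * {@code setNodeLabelsBytes} setter is assumed as the standard generated
 * counterpart on the builder.
 * <pre>{@code
 * static void copyLabel(YarnProtos.LabelsToNodeIdsProtoOrBuilder src,
 *     YarnProtos.LabelsToNodeIdsProto.Builder dst) {
 *   if (src.hasNodeLabels()) {
 *     dst.setNodeLabelsBytes(src.getNodeLabelsBytes());
 *   }
 * }
 * }</pre>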
*/ org.apache.hadoop.thirdparty.protobuf.ByteString getNodeLabelsBytes(); /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ java.util.List getNodeIdList(); /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId(int index); /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ int getNodeIdCount(); /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ java.util.List getNodeIdOrBuilderList(); /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.LabelsToNodeIdsProto} */ public static final class LabelsToNodeIdsProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.LabelsToNodeIdsProto) LabelsToNodeIdsProtoOrBuilder { private static final long serialVersionUID = 0L; // Use LabelsToNodeIdsProto.newBuilder() to construct. private LabelsToNodeIdsProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private LabelsToNodeIdsProto() { nodeLabels_ = ""; nodeId_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new LabelsToNodeIdsProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LabelsToNodeIdsProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LabelsToNodeIdsProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder.class); } private int bitField0_; public static final int NODELABELS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object nodeLabels_ = ""; /** * optional string nodeLabels = 1; * @return Whether the nodeLabels field is set. */ @java.lang.Override public boolean hasNodeLabels() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string nodeLabels = 1; * @return The nodeLabels. */ @java.lang.Override public java.lang.String getNodeLabels() { java.lang.Object ref = nodeLabels_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { nodeLabels_ = s; } return s; } } /** * optional string nodeLabels = 1; * @return The bytes for nodeLabels. 
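 * <p>Editorial round-trip sketch: build, serialize, and re-parse the message.
 * {@code setNodeLabels} and {@code addNodeId} are assumed as the standard
 * generated setters for the two fields; {@code parseFrom(byte[])} appears
 * among this message's static parse methods.
 * <pre>{@code
 * YarnProtos.LabelsToNodeIdsProto msg = YarnProtos.LabelsToNodeIdsProto
 *     .newBuilder()
 *     .setNodeLabels("gpu")
 *     .build();
 * byte[] wire = msg.toByteArray();
 * YarnProtos.LabelsToNodeIdsProto parsed =
 *     YarnProtos.LabelsToNodeIdsProto.parseFrom(wire);
 * }</pre>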
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNodeLabelsBytes() { java.lang.Object ref = nodeLabels_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); nodeLabels_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int NODEID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private java.util.List nodeId_; /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ @java.lang.Override public java.util.List getNodeIdList() { return nodeId_; } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ @java.lang.Override public java.util.List getNodeIdOrBuilderList() { return nodeId_; } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ @java.lang.Override public int getNodeIdCount() { return nodeId_.size(); } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId(int index) { return nodeId_.get(index); } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder( int index) { return nodeId_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, nodeLabels_); } for (int i = 0; i < nodeId_.size(); i++) { output.writeMessage(2, nodeId_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, nodeLabels_); } for (int i = 0; i < nodeId_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, nodeId_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto) obj; if (hasNodeLabels() != other.hasNodeLabels()) return false; if (hasNodeLabels()) { if (!getNodeLabels() .equals(other.getNodeLabels())) return false; } if (!getNodeIdList() .equals(other.getNodeIdList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasNodeLabels()) { hash = (37 * hash) + NODELABELS_FIELD_NUMBER; hash = (53 * hash) + 
getNodeLabels().hashCode(); } if (getNodeIdCount() > 0) { hash = (37 * hash) + NODEID_FIELD_NUMBER; hash = (53 * hash) + getNodeIdList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws 
java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.LabelsToNodeIdsProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.LabelsToNodeIdsProto) org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LabelsToNodeIdsProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LabelsToNodeIdsProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; nodeLabels_ = ""; if (nodeIdBuilder_ == null) { nodeId_ = java.util.Collections.emptyList(); } else { nodeId_ = null; nodeIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LabelsToNodeIdsProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto build() { org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override 
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto result = new org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto result) { if (nodeIdBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { nodeId_ = java.util.Collections.unmodifiableList(nodeId_); bitField0_ = (bitField0_ & ~0x00000002); } result.nodeId_ = nodeId_; } else { result.nodeId_ = nodeIdBuilder_.build(); } } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.nodeLabels_ = nodeLabels_; to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.getDefaultInstance()) return this; if (other.hasNodeLabels()) { nodeLabels_ = other.nodeLabels_; bitField0_ |= 0x00000001; onChanged(); } if (nodeIdBuilder_ == null) { if (!other.nodeId_.isEmpty()) { if (nodeId_.isEmpty()) { nodeId_ = other.nodeId_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureNodeIdIsMutable(); nodeId_.addAll(other.nodeId_); } onChanged(); } } else { if (!other.nodeId_.isEmpty()) { if (nodeIdBuilder_.isEmpty()) { nodeIdBuilder_.dispose(); nodeIdBuilder_ = null; nodeId_ = other.nodeId_; bitField0_ = (bitField0_ & ~0x00000002); nodeIdBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getNodeIdFieldBuilder() : null; } else { nodeIdBuilder_.addAllMessages(other.nodeId_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { nodeLabels_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.PARSER, extensionRegistry); if (nodeIdBuilder_ == null) { ensureNodeIdIsMutable(); nodeId_.add(m); } else { nodeIdBuilder_.addMessage(m); } break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object nodeLabels_ = ""; /** * optional string nodeLabels = 1; * @return Whether the nodeLabels field is set. */ public boolean hasNodeLabels() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string nodeLabels = 1; * @return The nodeLabels. */ public java.lang.String getNodeLabels() { java.lang.Object ref = nodeLabels_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { nodeLabels_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string nodeLabels = 1; * @return The bytes for nodeLabels. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNodeLabelsBytes() { java.lang.Object ref = nodeLabels_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); nodeLabels_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string nodeLabels = 1; * @param value The nodeLabels to set. * @return This builder for chaining. */ public Builder setNodeLabels( java.lang.String value) { if (value == null) { throw new NullPointerException(); } nodeLabels_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional string nodeLabels = 1; * @return This builder for chaining. */ public Builder clearNodeLabels() { nodeLabels_ = getDefaultInstance().getNodeLabels(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * optional string nodeLabels = 1; * @param value The bytes for nodeLabels to set. * @return This builder for chaining. 
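     * <p>A minimal builder sketch; the label, host, and port values below
     * are placeholders, not part of the generated API:
     * <pre>{@code
     * LabelsToNodeIdsProto mapping = LabelsToNodeIdsProto.newBuilder()
     *     .setNodeLabels("gpu")
     *     .addNodeId(NodeIdProto.newBuilder()
     *         .setHost("node-1.example.com")
     *         .setPort(45454))
     *     .build();
     * }</pre>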
*/ public Builder setNodeLabelsBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } nodeLabels_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.util.List nodeId_ = java.util.Collections.emptyList(); private void ensureNodeIdIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { nodeId_ = new java.util.ArrayList(nodeId_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_; /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public java.util.List getNodeIdList() { if (nodeIdBuilder_ == null) { return java.util.Collections.unmodifiableList(nodeId_); } else { return nodeIdBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public int getNodeIdCount() { if (nodeIdBuilder_ == null) { return nodeId_.size(); } else { return nodeIdBuilder_.getCount(); } } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId(int index) { if (nodeIdBuilder_ == null) { return nodeId_.get(index); } else { return nodeIdBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public Builder setNodeId( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) { if (nodeIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeIdIsMutable(); nodeId_.set(index, value); onChanged(); } else { nodeIdBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public Builder setNodeId( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) { if (nodeIdBuilder_ == null) { ensureNodeIdIsMutable(); nodeId_.set(index, builderForValue.build()); onChanged(); } else { nodeIdBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public Builder addNodeId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) { if (nodeIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeIdIsMutable(); nodeId_.add(value); onChanged(); } else { nodeIdBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public Builder addNodeId( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) { if (nodeIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeIdIsMutable(); nodeId_.add(index, value); onChanged(); } else { nodeIdBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public Builder addNodeId( org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) { if (nodeIdBuilder_ == null) { ensureNodeIdIsMutable(); nodeId_.add(builderForValue.build()); onChanged(); } else { nodeIdBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public Builder addNodeId( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) { if (nodeIdBuilder_ == null) { ensureNodeIdIsMutable(); nodeId_.add(index, builderForValue.build()); onChanged(); } else { 
nodeIdBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public Builder addAllNodeId( java.lang.Iterable values) { if (nodeIdBuilder_ == null) { ensureNodeIdIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, nodeId_); onChanged(); } else { nodeIdBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public Builder clearNodeId() { if (nodeIdBuilder_ == null) { nodeId_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { nodeIdBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public Builder removeNodeId(int index) { if (nodeIdBuilder_ == null) { ensureNodeIdIsMutable(); nodeId_.remove(index); onChanged(); } else { nodeIdBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder( int index) { return getNodeIdFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder( int index) { if (nodeIdBuilder_ == null) { return nodeId_.get(index); } else { return nodeIdBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public java.util.List getNodeIdOrBuilderList() { if (nodeIdBuilder_ != null) { return nodeIdBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(nodeId_); } } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder addNodeIdBuilder() { return getNodeIdFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder addNodeIdBuilder( int index) { return getNodeIdFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeIdProto nodeId = 2; */ public java.util.List getNodeIdBuilderList() { return getNodeIdFieldBuilder().getBuilderList(); } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> getNodeIdFieldBuilder() { if (nodeIdBuilder_ == null) { nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>( nodeId_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); nodeId_ = null; } return nodeIdBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return 
        super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.LabelsToNodeIdsProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.LabelsToNodeIdsProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<LabelsToNodeIdsProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<LabelsToNodeIdsProto>() {
      @java.lang.Override
      public LabelsToNodeIdsProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<LabelsToNodeIdsProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<LabelsToNodeIdsProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface NodeLabelProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeLabelProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional string name = 1;
     * @return Whether the name field is set.
     */
    boolean hasName();
    /**
     * optional string name = 1;
     * @return The name.
     */
    java.lang.String getName();
    /**
     * optional string name = 1;
     * @return The bytes for name.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNameBytes();

    /**
     * optional bool isExclusive = 2 [default = true];
     * @return Whether the isExclusive field is set.
     */
    boolean hasIsExclusive();
    /**
     * optional bool isExclusive = 2 [default = true];
     * @return The isExclusive.
     */
    boolean getIsExclusive();
  }
  /**
   * Protobuf type {@code hadoop.yarn.NodeLabelProto}
   */
  public static final class NodeLabelProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeLabelProto)
      NodeLabelProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use NodeLabelProto.newBuilder() to construct.
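    //
    // A minimal round-trip sketch for this message; the label name below is
    // a placeholder, not part of the generated API:
    //
    //   NodeLabelProto label = NodeLabelProto.newBuilder()
    //       .setName("gpu")
    //       .setIsExclusive(false)
    //       .build();
    //   NodeLabelProto parsed = NodeLabelProto.parseFrom(label.toByteArray());
    //
    // Because isExclusive declares [default = true], an instance that never
    // sets the field still returns true from getIsExclusive() while
    // hasIsExclusive() stays false.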
private NodeLabelProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private NodeLabelProto() { name_ = ""; isExclusive_ = true; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new NodeLabelProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeLabelProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeLabelProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * optional string name = 1; * @return Whether the name field is set. */ @java.lang.Override public boolean hasName() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string name = 1; * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * optional string name = 1; * @return The bytes for name. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int ISEXCLUSIVE_FIELD_NUMBER = 2; private boolean isExclusive_ = true; /** * optional bool isExclusive = 2 [default = true]; * @return Whether the isExclusive field is set. */ @java.lang.Override public boolean hasIsExclusive() { return ((bitField0_ & 0x00000002) != 0); } /** * optional bool isExclusive = 2 [default = true]; * @return The isExclusive. 
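     * When the field is unset this returns the declared default
     * ({@code true}); use {@link #hasIsExclusive()} to tell an explicit
     * {@code true} apart from the default.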
*/ @java.lang.Override public boolean getIsExclusive() { return isExclusive_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeBool(2, isExclusive_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(2, isExclusive_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto) obj; if (hasName() != other.hasName()) return false; if (hasName()) { if (!getName() .equals(other.getName())) return false; } if (hasIsExclusive() != other.hasIsExclusive()) return false; if (hasIsExclusive()) { if (getIsExclusive() != other.getIsExclusive()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (hasIsExclusive()) { hash = (37 * hash) + ISEXCLUSIVE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getIsExclusive()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite 
extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.NodeLabelProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeLabelProto) org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeLabelProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeLabelProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; isExclusive_ = true; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeLabelProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto build() { org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.isExclusive_ = isExclusive_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.getDefaultInstance()) return this; if (other.hasName()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasIsExclusive()) { setIsExclusive(other.getIsExclusive()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { isExclusive_ = input.readBool(); bitField0_ |= 0x00000002; break; } // case 16 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * optional string name = 1; * @return Whether the name field is set. */ public boolean hasName() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string name = 1; * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string name = 1; * @return The bytes for name. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string name = 1; * @param value The name to set. 
* @return This builder for chaining. */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional string name = 1; * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * optional string name = 1; * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private boolean isExclusive_ = true; /** * optional bool isExclusive = 2 [default = true]; * @return Whether the isExclusive field is set. */ @java.lang.Override public boolean hasIsExclusive() { return ((bitField0_ & 0x00000002) != 0); } /** * optional bool isExclusive = 2 [default = true]; * @return The isExclusive. */ @java.lang.Override public boolean getIsExclusive() { return isExclusive_; } /** * optional bool isExclusive = 2 [default = true]; * @param value The isExclusive to set. * @return This builder for chaining. */ public Builder setIsExclusive(boolean value) { isExclusive_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional bool isExclusive = 2 [default = true]; * @return This builder for chaining. */ public Builder clearIsExclusive() { bitField0_ = (bitField0_ & ~0x00000002); isExclusive_ = true; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeLabelProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeLabelProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public NodeLabelProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeLabelProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodeLabelProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface NodeAttributeKeyProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeAttributeKeyProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional string attributePrefix = 1 [default = "rm.yarn.io"];
     * @return Whether the attributePrefix field is set.
     */
    boolean hasAttributePrefix();
    /**
     * optional string attributePrefix = 1 [default = "rm.yarn.io"];
     * @return The attributePrefix.
     */
    java.lang.String getAttributePrefix();
    /**
     * optional string attributePrefix = 1 [default = "rm.yarn.io"];
     * @return The bytes for attributePrefix.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getAttributePrefixBytes();

    /**
     * required string attributeName = 2;
     * @return Whether the attributeName field is set.
     */
    boolean hasAttributeName();
    /**
     * required string attributeName = 2;
     * @return The attributeName.
     */
    java.lang.String getAttributeName();
    /**
     * required string attributeName = 2;
     * @return The bytes for attributeName.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getAttributeNameBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.NodeAttributeKeyProto}
   */
  public static final class NodeAttributeKeyProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeAttributeKeyProto)
      NodeAttributeKeyProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use NodeAttributeKeyProto.newBuilder() to construct.
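    //
    // A minimal sketch; the prefix and name values below are placeholders.
    // attributeName is a required proto2 field, so Builder#build() throws an
    // UninitializedMessageException until it is set (buildPartial() skips
    // that check):
    //
    //   NodeAttributeKeyProto key = NodeAttributeKeyProto.newBuilder()
    //       .setAttributePrefix("rm.yarn.io")  // same as the declared default
    //       .setAttributeName("hostname")
    //       .build();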
private NodeAttributeKeyProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private NodeAttributeKeyProto() { attributePrefix_ = "rm.yarn.io"; attributeName_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new NodeAttributeKeyProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeKeyProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeKeyProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder.class); } private int bitField0_; public static final int ATTRIBUTEPREFIX_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object attributePrefix_ = "rm.yarn.io"; /** * optional string attributePrefix = 1 [default = "rm.yarn.io"]; * @return Whether the attributePrefix field is set. */ @java.lang.Override public boolean hasAttributePrefix() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string attributePrefix = 1 [default = "rm.yarn.io"]; * @return The attributePrefix. */ @java.lang.Override public java.lang.String getAttributePrefix() { java.lang.Object ref = attributePrefix_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { attributePrefix_ = s; } return s; } } /** * optional string attributePrefix = 1 [default = "rm.yarn.io"]; * @return The bytes for attributePrefix. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getAttributePrefixBytes() { java.lang.Object ref = attributePrefix_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); attributePrefix_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int ATTRIBUTENAME_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object attributeName_ = ""; /** * required string attributeName = 2; * @return Whether the attributeName field is set. */ @java.lang.Override public boolean hasAttributeName() { return ((bitField0_ & 0x00000002) != 0); } /** * required string attributeName = 2; * @return The attributeName. 
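     * This is the message's only required field; isInitialized() reports
     * false, and Builder#build() fails, until it has been set.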
     */
    @java.lang.Override
    public java.lang.String getAttributeName() {
      java.lang.Object ref = attributeName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs =
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          attributeName_ = s;
        }
        return s;
      }
    }
    /**
     * required string attributeName = 2;
     * @return The bytes for attributeName.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getAttributeNameBytes() {
      java.lang.Object ref = attributeName_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b =
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        attributeName_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      if (!hasAttributeName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, attributePrefix_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, attributeName_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, attributePrefix_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, attributeName_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto other =
          (org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto) obj;

      if (hasAttributePrefix() != other.hasAttributePrefix()) return false;
      if (hasAttributePrefix()) {
        if (!getAttributePrefix().equals(other.getAttributePrefix())) return false;
      }
      if (hasAttributeName() != other.hasAttributeName()) return false;
      if (hasAttributeName()) {
        if (!getAttributeName().equals(other.getAttributeName())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasAttributePrefix()) {
        hash = (37 * hash) + ATTRIBUTEPREFIX_FIELD_NUMBER;
        hash = (53 * hash) + getAttributePrefix().hashCode();
      }
      if (hasAttributeName()) {
        hash = (37 * hash) + ATTRIBUTENAME_FIELD_NUMBER;
        hash = (53 * hash) + getAttributeName().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.NodeAttributeKeyProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeAttributeKeyProto)
        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeKeyProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeKeyProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        attributePrefix_ = "rm.yarn.io";
        attributeName_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeKeyProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto result =
            new org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.attributePrefix_ = attributePrefix_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.attributeName_ = attributeName_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto) other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance()) return this;
        if (other.hasAttributePrefix()) {
          attributePrefix_ = other.attributePrefix_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasAttributeName()) {
          attributeName_ = other.attributeName_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasAttributeName()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                attributePrefix_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                attributeName_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object attributePrefix_ = "rm.yarn.io";
      /**
       * optional string attributePrefix = 1 [default = "rm.yarn.io"];
       * @return Whether the attributePrefix field is set.
       */
      public boolean hasAttributePrefix() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * optional string attributePrefix = 1 [default = "rm.yarn.io"];
       * @return The attributePrefix.
       */
      public java.lang.String getAttributePrefix() {
        java.lang.Object ref = attributePrefix_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            attributePrefix_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * optional string attributePrefix = 1 [default = "rm.yarn.io"];
       * @return The bytes for attributePrefix.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getAttributePrefixBytes() {
        java.lang.Object ref = attributePrefix_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b =
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          attributePrefix_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string attributePrefix = 1 [default = "rm.yarn.io"];
       * @param value The attributePrefix to set.
       * @return This builder for chaining.
       */
      public Builder setAttributePrefix(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        attributePrefix_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * optional string attributePrefix = 1 [default = "rm.yarn.io"];
       * @return This builder for chaining.
       */
      public Builder clearAttributePrefix() {
        attributePrefix_ = getDefaultInstance().getAttributePrefix();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * optional string attributePrefix = 1 [default = "rm.yarn.io"];
       * @param value The bytes for attributePrefix to set.
       * @return This builder for chaining.
       */
      public Builder setAttributePrefixBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        attributePrefix_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.lang.Object attributeName_ = "";
      /**
       * required string attributeName = 2;
       * @return Whether the attributeName field is set.
       */
      public boolean hasAttributeName() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * required string attributeName = 2;
       * @return The attributeName.
       */
      public java.lang.String getAttributeName() {
        java.lang.Object ref = attributeName_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            attributeName_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * required string attributeName = 2;
       * @return The bytes for attributeName.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getAttributeNameBytes() {
        java.lang.Object ref = attributeName_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b =
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          attributeName_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * required string attributeName = 2;
       * @param value The attributeName to set.
       * @return This builder for chaining.
       */
      public Builder setAttributeName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        attributeName_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * required string attributeName = 2;
       * @return This builder for chaining.
       */
      public Builder clearAttributeName() {
        attributeName_ = getDefaultInstance().getAttributeName();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * required string attributeName = 2;
       * @param value The bytes for attributeName to set.
       * @return This builder for chaining.
       */
      public Builder setAttributeNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        attributeName_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeAttributeKeyProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeAttributeKeyProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributeKeyProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeAttributeKeyProto>() {
      @java.lang.Override
      public NodeAttributeKeyProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributeKeyProto> parser() {
      return PARSER;
    }
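    // Editorial illustration (not part of the generated source): a minimal
    // sketch of round-tripping a NodeAttributeKeyProto through its wire form
    // using the Builder and parser machinery above. "hostname" is a made-up
    // example value; build() throws if the required attributeName is unset.
    //
    //   NodeAttributeKeyProto key = NodeAttributeKeyProto.newBuilder()
    //       .setAttributePrefix("rm.yarn.io") // optional; "rm.yarn.io" is also the default
    //       .setAttributeName("hostname")     // required field
    //       .build();
    //   NodeAttributeKeyProto copy = NodeAttributeKeyProto.parseFrom(key.toByteArray());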
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributeKeyProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface NodeAttributeProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeAttributeProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;
     * @return Whether the attributeKey field is set.
     */
    boolean hasAttributeKey();
    /**
     * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;
     * @return The attributeKey.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getAttributeKey();
    /**
     * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getAttributeKeyOrBuilder();

    /**
     * optional .hadoop.yarn.NodeAttributeTypeProto attributeType = 2 [default = STRING];
     * @return Whether the attributeType field is set.
     */
    boolean hasAttributeType();
    /**
     * optional .hadoop.yarn.NodeAttributeTypeProto attributeType = 2 [default = STRING];
     * @return The attributeType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto getAttributeType();

    /**
     * optional string attributeValue = 3 [default = ""];
     * @return Whether the attributeValue field is set.
     */
    boolean hasAttributeValue();
    /**
     * optional string attributeValue = 3 [default = ""];
     * @return The attributeValue.
     */
    java.lang.String getAttributeValue();
    /**
     * optional string attributeValue = 3 [default = ""];
     * @return The bytes for attributeValue.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getAttributeValueBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.NodeAttributeProto}
   */
  public static final class NodeAttributeProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeAttributeProto)
      NodeAttributeProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodeAttributeProto.newBuilder() to construct.
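    // Editorial illustration (not part of the generated source): constructing a
    // NodeAttributeProto with the Builder defined below. Only attributeKey is
    // required; attributeType defaults to STRING and attributeValue to "".
    // The attribute name and value ("os", "centos7") are made-up examples.
    //
    //   NodeAttributeProto attr = NodeAttributeProto.newBuilder()
    //       .setAttributeKey(NodeAttributeKeyProto.newBuilder()
    //           .setAttributeName("os")
    //           .build())
    //       .setAttributeType(NodeAttributeTypeProto.STRING)
    //       .setAttributeValue("centos7")
    //       .build();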
private NodeAttributeProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private NodeAttributeProto() { attributeType_ = 1; attributeValue_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new NodeAttributeProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder.class); } private int bitField0_; public static final int ATTRIBUTEKEY_FIELD_NUMBER = 1; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto attributeKey_; /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; * @return Whether the attributeKey field is set. */ @java.lang.Override public boolean hasAttributeKey() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; * @return The attributeKey. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getAttributeKey() { return attributeKey_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : attributeKey_; } /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getAttributeKeyOrBuilder() { return attributeKey_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : attributeKey_; } public static final int ATTRIBUTETYPE_FIELD_NUMBER = 2; private int attributeType_ = 1; /** * optional .hadoop.yarn.NodeAttributeTypeProto attributeType = 2 [default = STRING]; * @return Whether the attributeType field is set. */ @java.lang.Override public boolean hasAttributeType() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.NodeAttributeTypeProto attributeType = 2 [default = STRING]; * @return The attributeType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto getAttributeType() { org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.forNumber(attributeType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.STRING : result; } public static final int ATTRIBUTEVALUE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object attributeValue_ = ""; /** * optional string attributeValue = 3 [default = ""]; * @return Whether the attributeValue field is set. 
*/ @java.lang.Override public boolean hasAttributeValue() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string attributeValue = 3 [default = ""]; * @return The attributeValue. */ @java.lang.Override public java.lang.String getAttributeValue() { java.lang.Object ref = attributeValue_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { attributeValue_ = s; } return s; } } /** * optional string attributeValue = 3 [default = ""]; * @return The bytes for attributeValue. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getAttributeValueBytes() { java.lang.Object ref = attributeValue_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); attributeValue_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasAttributeKey()) { memoizedIsInitialized = 0; return false; } if (!getAttributeKey().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getAttributeKey()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeEnum(2, attributeType_); } if (((bitField0_ & 0x00000004) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, attributeValue_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getAttributeKey()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(2, attributeType_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, attributeValue_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto) obj; if (hasAttributeKey() != other.hasAttributeKey()) return false; if (hasAttributeKey()) { if (!getAttributeKey() .equals(other.getAttributeKey())) return false; } if (hasAttributeType() != other.hasAttributeType()) return false; if (hasAttributeType()) { if (attributeType_ != other.attributeType_) return false; } if (hasAttributeValue() != other.hasAttributeValue()) return false; if (hasAttributeValue()) { if 
(!getAttributeValue() .equals(other.getAttributeValue())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasAttributeKey()) { hash = (37 * hash) + ATTRIBUTEKEY_FIELD_NUMBER; hash = (53 * hash) + getAttributeKey().hashCode(); } if (hasAttributeType()) { hash = (37 * hash) + ATTRIBUTETYPE_FIELD_NUMBER; hash = (53 * hash) + attributeType_; } if (hasAttributeValue()) { hash = (37 * hash) + ATTRIBUTEVALUE_FIELD_NUMBER; hash = (53 * hash) + getAttributeValue().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.NodeAttributeProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeAttributeProto) org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getAttributeKeyFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; attributeKey_ = null; if (attributeKeyBuilder_ != null) { attributeKeyBuilder_.dispose(); attributeKeyBuilder_ = null; } attributeType_ = 1; attributeValue_ = ""; return this; } @java.lang.Override public 
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto build() { org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.attributeKey_ = attributeKeyBuilder_ == null ? attributeKey_ : attributeKeyBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.attributeType_ = attributeType_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.attributeValue_ = attributeValue_; to_bitField0_ |= 0x00000004; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.getDefaultInstance()) return this; if (other.hasAttributeKey()) { mergeAttributeKey(other.getAttributeKey()); } if (other.hasAttributeType()) { setAttributeType(other.getAttributeType()); } if (other.hasAttributeValue()) { attributeValue_ = other.attributeValue_; bitField0_ |= 0x00000004; 
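// Editorial note: the merged string is shared by reference rather than copied;
// protobuf string fields are immutable, so after recording the presence bit in
// bitField0_ the builder only needs the onChanged() notification that follows.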
onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasAttributeKey()) { return false; } if (!getAttributeKey().isInitialized()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getAttributeKeyFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 16: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(2, tmpRaw); } else { attributeType_ = tmpRaw; bitField0_ |= 0x00000002; } break; } // case 16 case 26: { attributeValue_ = input.readBytes(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto attributeKey_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> attributeKeyBuilder_; /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; * @return Whether the attributeKey field is set. */ public boolean hasAttributeKey() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; * @return The attributeKey. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getAttributeKey() { if (attributeKeyBuilder_ == null) { return attributeKey_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : attributeKey_; } else { return attributeKeyBuilder_.getMessage(); } } /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; */ public Builder setAttributeKey(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) { if (attributeKeyBuilder_ == null) { if (value == null) { throw new NullPointerException(); } attributeKey_ = value; } else { attributeKeyBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; */ public Builder setAttributeKey( org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder builderForValue) { if (attributeKeyBuilder_ == null) { attributeKey_ = builderForValue.build(); } else { attributeKeyBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; */ public Builder mergeAttributeKey(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) { if (attributeKeyBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && attributeKey_ != null && attributeKey_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance()) { getAttributeKeyBuilder().mergeFrom(value); } else { attributeKey_ = value; } } else { attributeKeyBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; */ public Builder clearAttributeKey() { bitField0_ = (bitField0_ & ~0x00000001); attributeKey_ = null; if (attributeKeyBuilder_ != null) { attributeKeyBuilder_.dispose(); attributeKeyBuilder_ = null; } onChanged(); return this; } /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder getAttributeKeyBuilder() { bitField0_ |= 0x00000001; onChanged(); return getAttributeKeyFieldBuilder().getBuilder(); } /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getAttributeKeyOrBuilder() { if (attributeKeyBuilder_ != null) { return attributeKeyBuilder_.getMessageOrBuilder(); } else { return attributeKey_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : attributeKey_; } } /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> getAttributeKeyFieldBuilder() { if (attributeKeyBuilder_ == null) { attributeKeyBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder>( getAttributeKey(), getParentForChildren(), isClean()); attributeKey_ = null; } return attributeKeyBuilder_; } private int attributeType_ = 1; /** * optional .hadoop.yarn.NodeAttributeTypeProto attributeType = 2 [default = STRING]; * @return Whether the attributeType field is set. */ @java.lang.Override public boolean hasAttributeType() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.NodeAttributeTypeProto attributeType = 2 [default = STRING]; * @return The attributeType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto getAttributeType() { org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.forNumber(attributeType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.STRING : result; } /** * optional .hadoop.yarn.NodeAttributeTypeProto attributeType = 2 [default = STRING]; * @param value The attributeType to set. * @return This builder for chaining. */ public Builder setAttributeType(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; attributeType_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.NodeAttributeTypeProto attributeType = 2 [default = STRING]; * @return This builder for chaining. */ public Builder clearAttributeType() { bitField0_ = (bitField0_ & ~0x00000002); attributeType_ = 1; onChanged(); return this; } private java.lang.Object attributeValue_ = ""; /** * optional string attributeValue = 3 [default = ""]; * @return Whether the attributeValue field is set. */ public boolean hasAttributeValue() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string attributeValue = 3 [default = ""]; * @return The attributeValue. */ public java.lang.String getAttributeValue() { java.lang.Object ref = attributeValue_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { attributeValue_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string attributeValue = 3 [default = ""]; * @return The bytes for attributeValue. 
*/ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getAttributeValueBytes() { java.lang.Object ref = attributeValue_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); attributeValue_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string attributeValue = 3 [default = ""]; * @param value The attributeValue to set. * @return This builder for chaining. */ public Builder setAttributeValue( java.lang.String value) { if (value == null) { throw new NullPointerException(); } attributeValue_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional string attributeValue = 3 [default = ""]; * @return This builder for chaining. */ public Builder clearAttributeValue() { attributeValue_ = getDefaultInstance().getAttributeValue(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * optional string attributeValue = 3 [default = ""]; * @param value The bytes for attributeValue to set. * @return This builder for chaining. */ public Builder setAttributeValueBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } attributeValue_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeAttributeProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeAttributeProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public NodeAttributeProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface NodeAttributeInfoProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeAttributeInfoProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; * @return Whether the attributeKey field is set. */ boolean hasAttributeKey(); /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; * @return The attributeKey. */ org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getAttributeKey(); /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getAttributeKeyOrBuilder(); /** * required .hadoop.yarn.NodeAttributeTypeProto attributeType = 2; * @return Whether the attributeType field is set. */ boolean hasAttributeType(); /** * required .hadoop.yarn.NodeAttributeTypeProto attributeType = 2; * @return The attributeType. */ org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto getAttributeType(); } /** * Protobuf type {@code hadoop.yarn.NodeAttributeInfoProto} */ public static final class NodeAttributeInfoProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeAttributeInfoProto) NodeAttributeInfoProtoOrBuilder { private static final long serialVersionUID = 0L; // Use NodeAttributeInfoProto.newBuilder() to construct. private NodeAttributeInfoProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private NodeAttributeInfoProto() { attributeType_ = 1; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new NodeAttributeInfoProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeInfoProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeInfoProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder.class); } private int bitField0_; public static final int ATTRIBUTEKEY_FIELD_NUMBER = 1; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto attributeKey_; /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; * @return Whether the attributeKey field is set. 
*/ @java.lang.Override public boolean hasAttributeKey() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; * @return The attributeKey. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getAttributeKey() { return attributeKey_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : attributeKey_; } /** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getAttributeKeyOrBuilder() { return attributeKey_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : attributeKey_; } public static final int ATTRIBUTETYPE_FIELD_NUMBER = 2; private int attributeType_ = 1; /** * required .hadoop.yarn.NodeAttributeTypeProto attributeType = 2; * @return Whether the attributeType field is set. */ @java.lang.Override public boolean hasAttributeType() { return ((bitField0_ & 0x00000002) != 0); } /** * required .hadoop.yarn.NodeAttributeTypeProto attributeType = 2; * @return The attributeType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto getAttributeType() { org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.forNumber(attributeType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.STRING : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasAttributeKey()) { memoizedIsInitialized = 0; return false; } if (!hasAttributeType()) { memoizedIsInitialized = 0; return false; } if (!getAttributeKey().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getAttributeKey()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeEnum(2, attributeType_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getAttributeKey()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(2, attributeType_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto) obj; if (hasAttributeKey() != other.hasAttributeKey()) return false; if (hasAttributeKey()) { if (!getAttributeKey() .equals(other.getAttributeKey())) return false; } if (hasAttributeType() != 
other.hasAttributeType()) return false; if (hasAttributeType()) { if (attributeType_ != other.attributeType_) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasAttributeKey()) { hash = (37 * hash) + ATTRIBUTEKEY_FIELD_NUMBER; hash = (53 * hash) + getAttributeKey().hashCode(); } if (hasAttributeType()) { hash = (37 * hash) + ATTRIBUTETYPE_FIELD_NUMBER; hash = (53 * hash) + attributeType_; } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.NodeAttributeInfoProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeAttributeInfoProto) org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeInfoProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeInfoProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getAttributeKeyFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; attributeKey_ = null; if (attributeKeyBuilder_ != null) { attributeKeyBuilder_.dispose(); attributeKeyBuilder_ = null; } attributeType_ = 1; return this; } 
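// Editorial illustration (not part of the generated source): in contrast to
// NodeAttributeProto, both attributeKey and attributeType are required here, so
// build() fails unless both are set. The setAttributeKey(...) call assumes the
// usual protoc Builder accessors generated after this excerpt.
//
//   NodeAttributeInfoProto info = NodeAttributeInfoProto.newBuilder()
//       .setAttributeKey(NodeAttributeKeyProto.newBuilder()
//           .setAttributeName("os").build())   // "os" is a made-up example
//       .setAttributeType(NodeAttributeTypeProto.STRING)
//       .build();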
@java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeInfoProto_descriptor; }
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.getDefaultInstance(); }
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto build() { org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; }
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; }
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.attributeKey_ = attributeKeyBuilder_ == null ? attributeKey_ : attributeKeyBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.attributeType_ = attributeType_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; }
@java.lang.Override public Builder clone() { return super.clone(); }
@java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); }
@java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); }
@java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); }
@java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); }
@java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); }
@java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto)other); } else { super.mergeFrom(other); return this; } }
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.getDefaultInstance()) return this; if (other.hasAttributeKey()) { mergeAttributeKey(other.getAttributeKey()); } if (other.hasAttributeType()) { setAttributeType(other.getAttributeType()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; }
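// Editorial note: both fields of this message are proto2 `required`, so
// isInitialized() below also recurses into the attributeKey submessage. In the
// stream merge, tag 10 is field 1 with wire type 2 (length-delimited message) and
// tag 16 is field 2 with wire type 0 (varint enum); an unrecognized enum number is
// kept as an unknown varint field instead of being silently dropped.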
@java.lang.Override public final boolean isInitialized() { if (!hasAttributeKey()) { return false; } if (!hasAttributeType()) { return false; } if (!getAttributeKey().isInitialized()) { return false; } return true; }
@java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getAttributeKeyFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10
case 16: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(2, tmpRaw); } else { attributeType_ = tmpRaw; bitField0_ |= 0x00000002; } break; } // case 16
default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag
} break; } // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally
return this; }
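// Editorial note: the attributeKey field below lives in one of two places: the plain
// message reference attributeKey_, or a lazily created SingleFieldBuilderV3
// (attributeKeyBuilder_). Once the field builder exists it becomes the single source
// of truth, which is why every accessor branches on attributeKeyBuilder_ == null.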
private int bitField0_;
private org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto attributeKey_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> attributeKeyBuilder_;
/** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; * @return Whether the attributeKey field is set. */
public boolean hasAttributeKey() { return ((bitField0_ & 0x00000001) != 0); }
/** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; * @return The attributeKey. */
public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getAttributeKey() { if (attributeKeyBuilder_ == null) { return attributeKey_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : attributeKey_; } else { return attributeKeyBuilder_.getMessage(); } }
/** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; */
public Builder setAttributeKey(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) { if (attributeKeyBuilder_ == null) { if (value == null) { throw new NullPointerException(); } attributeKey_ = value; } else { attributeKeyBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; }
/** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; */
public Builder setAttributeKey( org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder builderForValue) { if (attributeKeyBuilder_ == null) { attributeKey_ = builderForValue.build(); } else { attributeKeyBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; }
/** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; */
public Builder mergeAttributeKey(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) { if (attributeKeyBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && attributeKey_ != null && attributeKey_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance()) { getAttributeKeyBuilder().mergeFrom(value); } else { attributeKey_ = value; } } else { attributeKeyBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; }
/** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; */
public Builder clearAttributeKey() { bitField0_ = (bitField0_ & ~0x00000001); attributeKey_ = null; if (attributeKeyBuilder_ != null) { attributeKeyBuilder_.dispose(); attributeKeyBuilder_ = null; } onChanged(); return this; }
/** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; */
public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder getAttributeKeyBuilder() { bitField0_ |= 0x00000001; onChanged(); return getAttributeKeyFieldBuilder().getBuilder(); }
/** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; */
public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getAttributeKeyOrBuilder() { if (attributeKeyBuilder_ != null) { return attributeKeyBuilder_.getMessageOrBuilder(); } else { return attributeKey_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : attributeKey_; } }
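// Editorial note: getAttributeKeyFieldBuilder() below creates the SingleFieldBuilderV3
// on first use and then nulls out attributeKey_, making the builder the single owner
// of the field. attributeType_ is stored as a raw int holding the enum wire number
// (defaulting to 1, i.e. STRING, in this generated code); getAttributeType() maps it
// back via forNumber() and substitutes STRING for numbers unknown to this schema
// version.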
/** * required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1; */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> getAttributeKeyFieldBuilder() { if (attributeKeyBuilder_ == null) { attributeKeyBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder>( getAttributeKey(), getParentForChildren(), isClean()); attributeKey_ = null; } return attributeKeyBuilder_; }
private int attributeType_ = 1;
/** * required .hadoop.yarn.NodeAttributeTypeProto attributeType = 2; * @return Whether the attributeType field is set. */
@java.lang.Override public boolean hasAttributeType() { return ((bitField0_ & 0x00000002) != 0); }
/** * required .hadoop.yarn.NodeAttributeTypeProto attributeType = 2; * @return The attributeType. */
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto getAttributeType() { org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.forNumber(attributeType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.STRING : result; }
/** * required .hadoop.yarn.NodeAttributeTypeProto attributeType = 2; * @param value The attributeType to set. * @return This builder for chaining. */
public Builder setAttributeType(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; attributeType_ = value.getNumber(); onChanged(); return this; }
/** * required .hadoop.yarn.NodeAttributeTypeProto attributeType = 2; * @return This builder for chaining. */
public Builder clearAttributeType() { bitField0_ = (bitField0_ & ~0x00000002); attributeType_ = 1; onChanged(); return this; }
@java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); }
@java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); }
// @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeAttributeInfoProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.NodeAttributeInfoProto)
private static final org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto DEFAULT_INSTANCE;
static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto(); }
public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto getDefaultInstance() { return DEFAULT_INSTANCE; }
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributeInfoProto> PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeAttributeInfoProto>() { @java.lang.Override public NodeAttributeInfoProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } };
public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributeInfoProto> parser() { return PARSER; }
@java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributeInfoProto> getParserForType() { return PARSER; }
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; }
}
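// Editorial note: a minimal end-to-end sketch for NodeAttributeInfoProto (illustrative
// only; NodeAttributeKeyProto and its attributeName field are defined elsewhere in
// this file, and the literal values here are invented):
//
//   NodeAttributeInfoProto info = NodeAttributeInfoProto.newBuilder()
//       .setAttributeKey(NodeAttributeKeyProto.newBuilder()
//           .setAttributeName("os-type"))
//       .setAttributeType(NodeAttributeTypeProto.STRING)
//       .build();                       // throws if a required field is unset
//   byte[] bytes = info.toByteArray();
//   NodeAttributeInfoProto roundTrip = NodeAttributeInfoProto.parseFrom(bytes);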
public interface NodeToAttributeValueProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeToAttributeValueProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/** * required string hostname = 1; * @return Whether the hostname field is set. */
boolean hasHostname();
/** * required string hostname = 1; * @return The hostname. */
java.lang.String getHostname();
/** * required string hostname = 1; * @return The bytes for hostname. */
org.apache.hadoop.thirdparty.protobuf.ByteString getHostnameBytes();
/** * required string attributeValue = 2; * @return Whether the attributeValue field is set. */
boolean hasAttributeValue();
/** * required string attributeValue = 2; * @return The attributeValue. */
java.lang.String getAttributeValue();
/** * required string attributeValue = 2; * @return The bytes for attributeValue. */
org.apache.hadoop.thirdparty.protobuf.ByteString getAttributeValueBytes();
}
/** * Protobuf type {@code hadoop.yarn.NodeToAttributeValueProto} */
public static final class NodeToAttributeValueProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeToAttributeValueProto)
    NodeToAttributeValueProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use NodeToAttributeValueProto.newBuilder() to construct.
private NodeToAttributeValueProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); }
private NodeToAttributeValueProto() { hostname_ = ""; attributeValue_ = ""; }
@java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new NodeToAttributeValueProto(); }
@java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; }
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributeValueProto_descriptor; }
@java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributeValueProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder.class); }
private int bitField0_;
public static final int HOSTNAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial") private volatile java.lang.Object hostname_ = "";
/** * required string hostname = 1; * @return Whether the hostname field is set. */
@java.lang.Override public boolean hasHostname() { return ((bitField0_ & 0x00000001) != 0); }
/** * required string hostname = 1; * @return The hostname. */
@java.lang.Override public java.lang.String getHostname() { java.lang.Object ref = hostname_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { hostname_ = s; } return s; } }
/** * required string hostname = 1; * @return The bytes for hostname. */
@java.lang.Override public org.apache.hadoop.thirdparty.protobuf.ByteString getHostnameBytes() { java.lang.Object ref = hostname_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); hostname_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } }
public static final int ATTRIBUTEVALUE_FIELD_NUMBER = 2;
@SuppressWarnings("serial") private volatile java.lang.Object attributeValue_ = "";
/** * required string attributeValue = 2; * @return Whether the attributeValue field is set. */
@java.lang.Override public boolean hasAttributeValue() { return ((bitField0_ & 0x00000002) != 0); }
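// Editorial note: getAttributeValue() below follows the same lazy-decode pattern as
// getHostname() above: the field holds a ByteString until the first read, and the
// decoded java.lang.String is cached back into the same Object slot only when the
// bytes are valid UTF-8.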
/** * required string attributeValue = 2; * @return The attributeValue. */
@java.lang.Override public java.lang.String getAttributeValue() { java.lang.Object ref = attributeValue_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { attributeValue_ = s; } return s; } }
/** * required string attributeValue = 2; * @return The bytes for attributeValue. */
@java.lang.Override public org.apache.hadoop.thirdparty.protobuf.ByteString getAttributeValueBytes() { java.lang.Object ref = attributeValue_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); attributeValue_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } }
private byte memoizedIsInitialized = -1;
@java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasHostname()) { memoizedIsInitialized = 0; return false; } if (!hasAttributeValue()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; }
@java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, hostname_); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, attributeValue_); } getUnknownFields().writeTo(output); }
@java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, hostname_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, attributeValue_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; }
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto other = (org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto) obj; if (hasHostname() != other.hasHostname()) return false; if (hasHostname()) { if (!getHostname() .equals(other.getHostname())) return false; } if (hasAttributeValue() != other.hasAttributeValue()) return false; if (hasAttributeValue()) { if (!getAttributeValue() .equals(other.getAttributeValue())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; }
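// Editorial note: hashCode() below mirrors equals() above: it folds in the
// descriptor plus each present field and memoizes the result (0 is reserved to
// mean "not yet computed"), matching the memoizedSize pattern in getSerializedSize().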
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasHostname()) { hash = (37 * hash) + HOSTNAME_FIELD_NUMBER; hash = (53 * hash) + getHostname().hashCode(); } if (hasAttributeValue()) { hash = (37 * hash) + ATTRIBUTEVALUE_FIELD_NUMBER; hash = (53 * hash) + getAttributeValue().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; }
public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); }
public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); }
public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); }
public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); }
public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); }
public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); }
@java.lang.Override public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); }
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); }
@java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); }
@java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; }
/** * Protobuf type {@code hadoop.yarn.NodeToAttributeValueProto} */
public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeToAttributeValueProto)
    org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributeValueProto_descriptor; }
@java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributeValueProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder.class); }
// Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.newBuilder()
private Builder() { }
private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); }
@java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; hostname_ = ""; attributeValue_ = ""; return this; }
@java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributeValueProto_descriptor; }
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.getDefaultInstance(); }
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto build() { org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; }
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; }
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.hostname_ = hostname_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.attributeValue_ = attributeValue_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; }
@java.lang.Override public Builder clone() { return super.clone(); }
@java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); }
@java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); }
@java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); }
@java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); }
@java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); }
@java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto)other); } else { super.mergeFrom(other); return this; } }
public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.getDefaultInstance()) return this; if (other.hasHostname()) { hostname_ = other.hostname_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasAttributeValue()) { attributeValue_ = other.attributeValue_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; }
@java.lang.Override public final boolean isInitialized() { if (!hasHostname()) { return false; } if (!hasAttributeValue()) { return false; } return true; }
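// Editorial note: in the stream merge below, both string fields are read with
// readBytes() (tags 10 and 18 = fields 1 and 2, wire type 2) and stored as raw
// ByteStrings; UTF-8 validation is deferred to the getters rather than done here.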
@java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { hostname_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10
case 18: { attributeValue_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18
default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag
} break; } // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally
return this; }
private int bitField0_;
private java.lang.Object hostname_ = "";
/** * required string hostname = 1; * @return Whether the hostname field is set. */
public boolean hasHostname() { return ((bitField0_ & 0x00000001) != 0); }
/** * required string hostname = 1; * @return The hostname. */
public java.lang.String getHostname() { java.lang.Object ref = hostname_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { hostname_ = s; } return s; } else { return (java.lang.String) ref; } }
/** * required string hostname = 1; * @return The bytes for hostname. */
public org.apache.hadoop.thirdparty.protobuf.ByteString getHostnameBytes() { java.lang.Object ref = hostname_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); hostname_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } }
/** * required string hostname = 1; * @param value The hostname to set. * @return This builder for chaining. */
public Builder setHostname( java.lang.String value) { if (value == null) { throw new NullPointerException(); } hostname_ = value; bitField0_ |= 0x00000001; onChanged(); return this; }
/** * required string hostname = 1; * @return This builder for chaining. */
public Builder clearHostname() { hostname_ = getDefaultInstance().getHostname(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; }
/** * required string hostname = 1; * @param value The bytes for hostname to set. * @return This builder for chaining. */
public Builder setHostnameBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } hostname_ = value; bitField0_ |= 0x00000001; onChanged(); return this; }
private java.lang.Object attributeValue_ = "";
/** * required string attributeValue = 2; * @return Whether the attributeValue field is set. */
public boolean hasAttributeValue() { return ((bitField0_ & 0x00000002) != 0); }
/** * required string attributeValue = 2; * @return The attributeValue. */
public java.lang.String getAttributeValue() { java.lang.Object ref = attributeValue_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { attributeValue_ = s; } return s; } else { return (java.lang.String) ref; } }
/** * required string attributeValue = 2; * @return The bytes for attributeValue. */
public org.apache.hadoop.thirdparty.protobuf.ByteString getAttributeValueBytes() { java.lang.Object ref = attributeValue_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); attributeValue_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } }
/** * required string attributeValue = 2; * @param value The attributeValue to set. * @return This builder for chaining. */
public Builder setAttributeValue( java.lang.String value) { if (value == null) { throw new NullPointerException(); } attributeValue_ = value; bitField0_ |= 0x00000002; onChanged(); return this; }
/** * required string attributeValue = 2; * @return This builder for chaining. */
public Builder clearAttributeValue() { attributeValue_ = getDefaultInstance().getAttributeValue(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; }
/** * required string attributeValue = 2; * @param value The bytes for attributeValue to set. * @return This builder for chaining. */
public Builder setAttributeValueBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } attributeValue_ = value; bitField0_ |= 0x00000002; onChanged(); return this; }
@java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); }
@java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); }
// @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeToAttributeValueProto)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.NodeToAttributeValueProto)
private static final org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto DEFAULT_INSTANCE;
static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto(); }
public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto getDefaultInstance() { return DEFAULT_INSTANCE; }
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeToAttributeValueProto> PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeToAttributeValueProto>() { @java.lang.Override public NodeToAttributeValueProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } };
public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeToAttributeValueProto> parser() { return PARSER; }
@java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Parser<NodeToAttributeValueProto> getParserForType() { return PARSER; }
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; }
}
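// Editorial note: a minimal sketch of the message above (the literal values are
// invented for illustration):
//
//   NodeToAttributeValueProto nv = NodeToAttributeValueProto.newBuilder()
//       .setHostname("host-1.example.com")
//       .setAttributeValue("centos7")
//       .build();
//
// Both fields are proto2 `required`, so build() throws an
// UninitializedMessageException if either setter was skipped; buildPartial()
// would return the incomplete message instead.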
public interface AttributeToNodesProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.AttributeToNodesProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/** * required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1; * @return Whether the nodeAttribute field is set. */
boolean hasNodeAttribute();
/** * required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1; * @return The nodeAttribute. */
org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getNodeAttribute();
/** * required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1; */
org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getNodeAttributeOrBuilder();
/** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */
java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto> getNodeValueMapList();
/** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */
org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto getNodeValueMap(int index);
/** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */
int getNodeValueMapCount();
/** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */
java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder> getNodeValueMapOrBuilderList();
/** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */
org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder getNodeValueMapOrBuilder( int index);
}
/** * Protobuf type {@code hadoop.yarn.AttributeToNodesProto} */
public static final class AttributeToNodesProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.AttributeToNodesProto)
    AttributeToNodesProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use AttributeToNodesProto.newBuilder() to construct.
private AttributeToNodesProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); }
private AttributeToNodesProto() { nodeValueMap_ = java.util.Collections.emptyList(); }
@java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new AttributeToNodesProto(); }
@java.lang.Override public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; }
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AttributeToNodesProto_descriptor; }
@java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AttributeToNodesProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.class, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder.class); }
private int bitField0_;
public static final int NODEATTRIBUTE_FIELD_NUMBER = 1;
private org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto nodeAttribute_;
/** * required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1; * @return Whether the nodeAttribute field is set. */
@java.lang.Override public boolean hasNodeAttribute() { return ((bitField0_ & 0x00000001) != 0); }
/** * required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1; * @return The nodeAttribute. */
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getNodeAttribute() { return nodeAttribute_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : nodeAttribute_; }
/** * required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1; */
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getNodeAttributeOrBuilder() { return nodeAttribute_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : nodeAttribute_; }
public static final int NODEVALUEMAP_FIELD_NUMBER = 2;
@SuppressWarnings("serial") private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto> nodeValueMap_;
/** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */
@java.lang.Override public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto> getNodeValueMapList() { return nodeValueMap_; }
/** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */
@java.lang.Override public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder> getNodeValueMapOrBuilderList() { return nodeValueMap_; }
/** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */
@java.lang.Override public int getNodeValueMapCount() { return nodeValueMap_.size(); }
/** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto getNodeValueMap(int index) { return nodeValueMap_.get(index); }
/** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder getNodeValueMapOrBuilder( int index) { return nodeValueMap_.get(index); }
private byte memoizedIsInitialized = -1;
@java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasNodeAttribute()) { memoizedIsInitialized = 0; return false; } if (!getNodeAttribute().isInitialized()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getNodeValueMapCount(); i++) { if (!getNodeValueMap(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; }
@java.lang.Override public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getNodeAttribute()); } for (int i = 0; i < nodeValueMap_.size(); i++) { output.writeMessage(2, nodeValueMap_.get(i)); } getUnknownFields().writeTo(output); }
@java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getNodeAttribute()); } for (int i = 0; i < nodeValueMap_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, nodeValueMap_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; }
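// Editorial note: the repeated nodeValueMap field has no presence bit, so only the
// singular nodeAttribute participates in bitField0_; writeTo() above simply emits
// one length-delimited record with tag 2 per list element, and equals() below
// compares the repeated field as an ordered list.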
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto other = (org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto) obj; if (hasNodeAttribute() != other.hasNodeAttribute()) return false; if (hasNodeAttribute()) { if (!getNodeAttribute() .equals(other.getNodeAttribute())) return false; } if (!getNodeValueMapList() .equals(other.getNodeValueMapList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; }
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasNodeAttribute()) { hash = (37 * hash) + NODEATTRIBUTE_FIELD_NUMBER; hash = (53 * hash) + getNodeAttribute().hashCode(); } if (getNodeValueMapCount() > 0) { hash = (37 * hash) + NODEVALUEMAP_FIELD_NUMBER; hash = (53 * hash) + getNodeValueMapList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; }
public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); }
public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); }
public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); }
public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); }
public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); }
public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); }
@java.lang.Override public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); }
public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); }
@java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); }
@java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; }
/** * Protobuf type {@code hadoop.yarn.AttributeToNodesProto} */
public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:hadoop.yarn.AttributeToNodesProto)
    org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AttributeToNodesProto_descriptor; }
@java.lang.Override protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AttributeToNodesProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.class, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder.class); }
// Construct using org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.newBuilder()
private Builder() { maybeForceBuilderInitialization(); }
private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); }
private void maybeForceBuilderInitialization() { if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getNodeAttributeFieldBuilder(); getNodeValueMapFieldBuilder(); } }
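// Editorial note: the repeated nodeValueMap field in this builder lives either in a
// plain java.util.List (made mutable on demand via ensureNodeValueMapIsMutable(),
// declared later in this file) or in a repeated-field builder once field builders
// are in use; clear() and buildPartialRepeatedFields() below handle both
// representations.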
@java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; nodeAttribute_ = null; if (nodeAttributeBuilder_ != null) { nodeAttributeBuilder_.dispose(); nodeAttributeBuilder_ = null; } if (nodeValueMapBuilder_ == null) { nodeValueMap_ = java.util.Collections.emptyList(); } else { nodeValueMap_ = null; nodeValueMapBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; }
@java.lang.Override public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AttributeToNodesProto_descriptor; }
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.getDefaultInstance(); }
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto build() { org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; }
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto result = new org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; }
private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto result) { if (nodeValueMapBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { nodeValueMap_ = java.util.Collections.unmodifiableList(nodeValueMap_); bitField0_ = (bitField0_ & ~0x00000002); } result.nodeValueMap_ = nodeValueMap_; } else { result.nodeValueMap_ = nodeValueMapBuilder_.build(); } }
private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.nodeAttribute_ = nodeAttributeBuilder_ == null ? nodeAttribute_ : nodeAttributeBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; }
@java.lang.Override public Builder clone() { return super.clone(); }
@java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); }
@java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); }
@java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); }
@java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); }
@java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); }
@java.lang.Override public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto) { return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto)other); } else { super.mergeFrom(other); return this; } }
getNodeValueMapFieldBuilder() : null; } else { nodeValueMapBuilder_.addAllMessages(other.nodeValueMap_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasNodeAttribute()) { return false; } if (!getNodeAttribute().isInitialized()) { return false; } for (int i = 0; i < getNodeValueMapCount(); i++) { if (!getNodeValueMap(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getNodeAttributeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.PARSER, extensionRegistry); if (nodeValueMapBuilder_ == null) { ensureNodeValueMapIsMutable(); nodeValueMap_.add(m); } else { nodeValueMapBuilder_.addMessage(m); } break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto nodeAttribute_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> nodeAttributeBuilder_; /** * required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1; * @return Whether the nodeAttribute field is set. */ public boolean hasNodeAttribute() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1; * @return The nodeAttribute. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getNodeAttribute() { if (nodeAttributeBuilder_ == null) { return nodeAttribute_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : nodeAttribute_; } else { return nodeAttributeBuilder_.getMessage(); } } /** * required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1; */ public Builder setNodeAttribute(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) { if (nodeAttributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } nodeAttribute_ = value; } else { nodeAttributeBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1; */ public Builder setNodeAttribute( org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder builderForValue) { if (nodeAttributeBuilder_ == null) { nodeAttribute_ = builderForValue.build(); } else { nodeAttributeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1; */ public Builder mergeNodeAttribute(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) { if (nodeAttributeBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && nodeAttribute_ != null && nodeAttribute_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance()) { getNodeAttributeBuilder().mergeFrom(value); } else { nodeAttribute_ = value; } } else { nodeAttributeBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1; */ public Builder clearNodeAttribute() { bitField0_ = (bitField0_ & ~0x00000001); nodeAttribute_ = null; if (nodeAttributeBuilder_ != null) { nodeAttributeBuilder_.dispose(); nodeAttributeBuilder_ = null; } onChanged(); return this; } /** * required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder getNodeAttributeBuilder() { bitField0_ |= 0x00000001; onChanged(); return getNodeAttributeFieldBuilder().getBuilder(); } /** * required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getNodeAttributeOrBuilder() { if (nodeAttributeBuilder_ != null) { return nodeAttributeBuilder_.getMessageOrBuilder(); } else { return nodeAttribute_ == null ? 
              org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : nodeAttribute_;
        }
      }
      /**
       * required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder>
          getNodeAttributeFieldBuilder() {
        if (nodeAttributeBuilder_ == null) {
          nodeAttributeBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder>(
                  getNodeAttribute(),
                  getParentForChildren(),
                  isClean());
          nodeAttribute_ = null;
        }
        return nodeAttributeBuilder_;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto> nodeValueMap_ =
        java.util.Collections.emptyList();
      private void ensureNodeValueMapIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          nodeValueMap_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto>(nodeValueMap_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder> nodeValueMapBuilder_;

      /**
       * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto> getNodeValueMapList() {
        if (nodeValueMapBuilder_ == null) {
          return java.util.Collections.unmodifiableList(nodeValueMap_);
        } else {
          return nodeValueMapBuilder_.getMessageList();
        }
      }
      /**
       * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;
       */
      public int getNodeValueMapCount() {
        if (nodeValueMapBuilder_ == null) {
          return nodeValueMap_.size();
        } else {
          return nodeValueMapBuilder_.getCount();
        }
      }
      /**
       * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto getNodeValueMap(int index) {
        if (nodeValueMapBuilder_ == null) {
          return nodeValueMap_.get(index);
        } else {
          return nodeValueMapBuilder_.getMessage(index);
        }
      }
      /**
       * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;
       */
      public Builder setNodeValueMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto value) {
        if (nodeValueMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeValueMapIsMutable();
          nodeValueMap_.set(index, value);
          onChanged();
        } else {
          nodeValueMapBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;
       */
      public Builder setNodeValueMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder builderForValue) {
        if (nodeValueMapBuilder_ == null) {
          ensureNodeValueMapIsMutable();
          nodeValueMap_.set(index, builderForValue.build());
          onChanged();
        } else {
          nodeValueMapBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;
       */
      public Builder addNodeValueMap(org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto value) {
        if (nodeValueMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
ensureNodeValueMapIsMutable(); nodeValueMap_.add(value); onChanged(); } else { nodeValueMapBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */ public Builder addNodeValueMap( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto value) { if (nodeValueMapBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeValueMapIsMutable(); nodeValueMap_.add(index, value); onChanged(); } else { nodeValueMapBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */ public Builder addNodeValueMap( org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder builderForValue) { if (nodeValueMapBuilder_ == null) { ensureNodeValueMapIsMutable(); nodeValueMap_.add(builderForValue.build()); onChanged(); } else { nodeValueMapBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */ public Builder addNodeValueMap( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder builderForValue) { if (nodeValueMapBuilder_ == null) { ensureNodeValueMapIsMutable(); nodeValueMap_.add(index, builderForValue.build()); onChanged(); } else { nodeValueMapBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */ public Builder addAllNodeValueMap( java.lang.Iterable values) { if (nodeValueMapBuilder_ == null) { ensureNodeValueMapIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, nodeValueMap_); onChanged(); } else { nodeValueMapBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */ public Builder clearNodeValueMap() { if (nodeValueMapBuilder_ == null) { nodeValueMap_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { nodeValueMapBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */ public Builder removeNodeValueMap(int index) { if (nodeValueMapBuilder_ == null) { ensureNodeValueMapIsMutable(); nodeValueMap_.remove(index); onChanged(); } else { nodeValueMapBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder getNodeValueMapBuilder( int index) { return getNodeValueMapFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder getNodeValueMapOrBuilder( int index) { if (nodeValueMapBuilder_ == null) { return nodeValueMap_.get(index); } else { return nodeValueMapBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */ public java.util.List getNodeValueMapOrBuilderList() { if (nodeValueMapBuilder_ != null) { return nodeValueMapBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(nodeValueMap_); } } /** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder addNodeValueMapBuilder() { return 
getNodeValueMapFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder addNodeValueMapBuilder( int index) { return getNodeValueMapFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2; */ public java.util.List getNodeValueMapBuilderList() { return getNodeValueMapFieldBuilder().getBuilderList(); } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder> getNodeValueMapFieldBuilder() { if (nodeValueMapBuilder_ == null) { nodeValueMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder>( nodeValueMap_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); nodeValueMap_ = null; } return nodeValueMapBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.AttributeToNodesProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.AttributeToNodesProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public AttributeToNodesProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
              builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<AttributeToNodesProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<AttributeToNodesProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface NodeToAttributesProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeToAttributesProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional string node = 1;
     * @return Whether the node field is set.
     */
    boolean hasNode();
    /**
     * optional string node = 1;
     * @return The node.
     */
    java.lang.String getNode();
    /**
     * optional string node = 1;
     * @return The bytes for node.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeBytes();

    /**
     * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto>
        getNodeAttributesList();
    /**
     * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getNodeAttributes(int index);
    /**
     * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;
     */
    int getNodeAttributesCount();
    /**
     * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder>
        getNodeAttributesOrBuilderList();
    /**
     * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder getNodeAttributesOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.NodeToAttributesProto}
   */
  public static final class NodeToAttributesProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeToAttributesProto)
      NodeToAttributesProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodeToAttributesProto.newBuilder() to construct.
    private NodeToAttributesProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NodeToAttributesProto() {
      node_ = "";
      nodeAttributes_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new NodeToAttributesProto();
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributesProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributesProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder.class);
    }

    private int bitField0_;
    public static final int NODE_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object node_ = "";
    /**
     * optional string node = 1;
     * @return Whether the node field is set.
     */
    @java.lang.Override
    public boolean hasNode() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional string node = 1;
     * @return The node.
     */
    @java.lang.Override
    public java.lang.String getNode() {
      java.lang.Object ref = node_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs =
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          node_ = s;
        }
        return s;
      }
    }
    /**
     * optional string node = 1;
     * @return The bytes for node.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeBytes() {
      java.lang.Object ref = node_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b =
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        node_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int NODEATTRIBUTES_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> nodeAttributes_;
    /**
     * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> getNodeAttributesList() {
      return nodeAttributes_;
    }
    /**
     * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder>
        getNodeAttributesOrBuilderList() {
      return nodeAttributes_;
    }
    /**
     * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;
     */
    @java.lang.Override
    public int getNodeAttributesCount() {
      return nodeAttributes_.size();
    }
    /**
     * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getNodeAttributes(int index) {
      return nodeAttributes_.get(index);
    }
    /**
     * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder getNodeAttributesOrBuilder(
        int index) {
      return nodeAttributes_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      for (int i = 0; i < getNodeAttributesCount(); i++) {
        if (!getNodeAttributes(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, node_);
      }
      for (int i = 0; i < nodeAttributes_.size(); i++) {
        output.writeMessage(2, nodeAttributes_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, node_);
      }
      for (int i = 0; i < nodeAttributes_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, nodeAttributes_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public
boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto) obj; if (hasNode() != other.hasNode()) return false; if (hasNode()) { if (!getNode() .equals(other.getNode())) return false; } if (!getNodeAttributesList() .equals(other.getNodeAttributesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasNode()) { hash = (37 * hash) + NODE_FIELD_NUMBER; hash = (53 * hash) + getNode().hashCode(); } if (getNodeAttributesCount() > 0) { hash = (37 * hash) + NODEATTRIBUTES_FIELD_NUMBER; hash = (53 * hash) + getNodeAttributesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.NodeToAttributesProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeToAttributesProto) org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributesProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributesProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; node_ = ""; if (nodeAttributesBuilder_ == null) { nodeAttributes_ = java.util.Collections.emptyList(); } else { nodeAttributes_ = null; nodeAttributesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributesProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto build() { org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto result) { if (nodeAttributesBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { nodeAttributes_ = java.util.Collections.unmodifiableList(nodeAttributes_); bitField0_ = (bitField0_ & ~0x00000002); } result.nodeAttributes_ = nodeAttributes_; } else { result.nodeAttributes_ = nodeAttributesBuilder_.build(); } } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto 
result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.node_ = node_; to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.getDefaultInstance()) return this; if (other.hasNode()) { node_ = other.node_; bitField0_ |= 0x00000001; onChanged(); } if (nodeAttributesBuilder_ == null) { if (!other.nodeAttributes_.isEmpty()) { if (nodeAttributes_.isEmpty()) { nodeAttributes_ = other.nodeAttributes_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureNodeAttributesIsMutable(); nodeAttributes_.addAll(other.nodeAttributes_); } onChanged(); } } else { if (!other.nodeAttributes_.isEmpty()) { if (nodeAttributesBuilder_.isEmpty()) { nodeAttributesBuilder_.dispose(); nodeAttributesBuilder_ = null; nodeAttributes_ = other.nodeAttributes_; bitField0_ = (bitField0_ & ~0x00000002); nodeAttributesBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getNodeAttributesFieldBuilder() : null; } else { nodeAttributesBuilder_.addAllMessages(other.nodeAttributes_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getNodeAttributesCount(); i++) { if (!getNodeAttributes(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { node_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.PARSER, extensionRegistry); if (nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); nodeAttributes_.add(m); } else { nodeAttributesBuilder_.addMessage(m); } break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object node_ = ""; /** * optional string node = 1; * @return Whether the node field is set. */ public boolean hasNode() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string node = 1; * @return The node. */ public java.lang.String getNode() { java.lang.Object ref = node_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { node_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string node = 1; * @return The bytes for node. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNodeBytes() { java.lang.Object ref = node_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); node_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string node = 1; * @param value The node to set. * @return This builder for chaining. */ public Builder setNode( java.lang.String value) { if (value == null) { throw new NullPointerException(); } node_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional string node = 1; * @return This builder for chaining. */ public Builder clearNode() { node_ = getDefaultInstance().getNode(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * optional string node = 1; * @param value The bytes for node to set. * @return This builder for chaining. 
*/ public Builder setNodeBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } node_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.util.List nodeAttributes_ = java.util.Collections.emptyList(); private void ensureNodeAttributesIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { nodeAttributes_ = new java.util.ArrayList(nodeAttributes_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder> nodeAttributesBuilder_; /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public java.util.List getNodeAttributesList() { if (nodeAttributesBuilder_ == null) { return java.util.Collections.unmodifiableList(nodeAttributes_); } else { return nodeAttributesBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public int getNodeAttributesCount() { if (nodeAttributesBuilder_ == null) { return nodeAttributes_.size(); } else { return nodeAttributesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getNodeAttributes(int index) { if (nodeAttributesBuilder_ == null) { return nodeAttributes_.get(index); } else { return nodeAttributesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public Builder setNodeAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto value) { if (nodeAttributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeAttributesIsMutable(); nodeAttributes_.set(index, value); onChanged(); } else { nodeAttributesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public Builder setNodeAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder builderForValue) { if (nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); nodeAttributes_.set(index, builderForValue.build()); onChanged(); } else { nodeAttributesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public Builder addNodeAttributes(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto value) { if (nodeAttributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeAttributesIsMutable(); nodeAttributes_.add(value); onChanged(); } else { nodeAttributesBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public Builder addNodeAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto value) { if (nodeAttributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNodeAttributesIsMutable(); nodeAttributes_.add(index, value); onChanged(); } else { nodeAttributesBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public Builder addNodeAttributes( org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder builderForValue) { if 
(nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); nodeAttributes_.add(builderForValue.build()); onChanged(); } else { nodeAttributesBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public Builder addNodeAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder builderForValue) { if (nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); nodeAttributes_.add(index, builderForValue.build()); onChanged(); } else { nodeAttributesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public Builder addAllNodeAttributes( java.lang.Iterable values) { if (nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, nodeAttributes_); onChanged(); } else { nodeAttributesBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public Builder clearNodeAttributes() { if (nodeAttributesBuilder_ == null) { nodeAttributes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { nodeAttributesBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public Builder removeNodeAttributes(int index) { if (nodeAttributesBuilder_ == null) { ensureNodeAttributesIsMutable(); nodeAttributes_.remove(index); onChanged(); } else { nodeAttributesBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder getNodeAttributesBuilder( int index) { return getNodeAttributesFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder getNodeAttributesOrBuilder( int index) { if (nodeAttributesBuilder_ == null) { return nodeAttributes_.get(index); } else { return nodeAttributesBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public java.util.List getNodeAttributesOrBuilderList() { if (nodeAttributesBuilder_ != null) { return nodeAttributesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(nodeAttributes_); } } /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder addNodeAttributesBuilder() { return getNodeAttributesFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder addNodeAttributesBuilder( int index) { return getNodeAttributesFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2; */ public java.util.List getNodeAttributesBuilderList() { return getNodeAttributesFieldBuilder().getBuilderList(); } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< 
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder>
          getNodeAttributesFieldBuilder() {
        if (nodeAttributesBuilder_ == null) {
          nodeAttributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder>(
                  nodeAttributes_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          nodeAttributes_ = null;
        }
        return nodeAttributesBuilder_;
      }

      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeToAttributesProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeToAttributesProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeToAttributesProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeToAttributesProto>() {
      @java.lang.Override
      public NodeToAttributesProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeToAttributesProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodeToAttributesProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface DeregisterSubClustersProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.DeregisterSubClustersProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional string subClusterId = 1;
     * @return Whether the subClusterId field is set.
     */
    boolean hasSubClusterId();
    /**
     * optional string subClusterId = 1;
     * @return The subClusterId.
     */
    java.lang.String getSubClusterId();
    /**
     * optional string subClusterId = 1;
     * @return The bytes for subClusterId.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getSubClusterIdBytes();

    /**
     * optional string deregisterState = 2;
     * @return Whether the deregisterState field is set.
     */
    boolean hasDeregisterState();
    /**
     * optional string deregisterState = 2;
     * @return The deregisterState.
     */
    java.lang.String getDeregisterState();
    /**
     * optional string deregisterState = 2;
     * @return The bytes for deregisterState.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDeregisterStateBytes();

    /**
     * optional string lastHeartBeatTime = 3;
     * @return Whether the lastHeartBeatTime field is set.
     */
    boolean hasLastHeartBeatTime();
    /**
     * optional string lastHeartBeatTime = 3;
     * @return The lastHeartBeatTime.
     */
    java.lang.String getLastHeartBeatTime();
    /**
     * optional string lastHeartBeatTime = 3;
     * @return The bytes for lastHeartBeatTime.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getLastHeartBeatTimeBytes();

    /**
     * optional string information = 4;
     * @return Whether the information field is set.
     */
    boolean hasInformation();
    /**
     * optional string information = 4;
     * @return The information.
     */
    java.lang.String getInformation();
    /**
     * optional string information = 4;
     * @return The bytes for information.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getInformationBytes();

    /**
     * optional string subClusterState = 5;
     * @return Whether the subClusterState field is set.
     */
    boolean hasSubClusterState();
    /**
     * optional string subClusterState = 5;
     * @return The subClusterState.
     */
    java.lang.String getSubClusterState();
    /**
     * optional string subClusterState = 5;
     * @return The bytes for subClusterState.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getSubClusterStateBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.DeregisterSubClustersProto}
   */
  public static final class DeregisterSubClustersProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.DeregisterSubClustersProto)
      DeregisterSubClustersProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use DeregisterSubClustersProto.newBuilder() to construct.
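    // The comment above names the only supported construction path, so a minimal
    // usage sketch may help; it is an editorial illustration rather than protoc
    // output. The setters below are assumed to follow the standard
    // GeneratedMessageV3 builder conventions used throughout this file for
    // optional string fields, and the field values are made-up examples.
    //
    //   YarnProtos.DeregisterSubClustersProto msg =
    //       YarnProtos.DeregisterSubClustersProto.newBuilder()
    //           .setSubClusterId("SC-1")         // optional string subClusterId = 1
    //           .setDeregisterState("SC_LOST")   // optional string deregisterState = 2
    //           .build();  // this message declares no required fields, so build()
    //                      // cannot throw newUninitializedMessageException here
    //   byte[] wire = msg.toByteArray();
    //   YarnProtos.DeregisterSubClustersProto parsed =
    //       YarnProtos.DeregisterSubClustersProto.parseFrom(wire);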
private DeregisterSubClustersProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private DeregisterSubClustersProto() { subClusterId_ = ""; deregisterState_ = ""; lastHeartBeatTime_ = ""; information_ = ""; subClusterState_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new DeregisterSubClustersProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_DeregisterSubClustersProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_DeregisterSubClustersProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.class, org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.Builder.class); } private int bitField0_; public static final int SUBCLUSTERID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object subClusterId_ = ""; /** * optional string subClusterId = 1; * @return Whether the subClusterId field is set. */ @java.lang.Override public boolean hasSubClusterId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string subClusterId = 1; * @return The subClusterId. */ @java.lang.Override public java.lang.String getSubClusterId() { java.lang.Object ref = subClusterId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { subClusterId_ = s; } return s; } } /** * optional string subClusterId = 1; * @return The bytes for subClusterId. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getSubClusterIdBytes() { java.lang.Object ref = subClusterId_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); subClusterId_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int DEREGISTERSTATE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object deregisterState_ = ""; /** * optional string deregisterState = 2; * @return Whether the deregisterState field is set. */ @java.lang.Override public boolean hasDeregisterState() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string deregisterState = 2; * @return The deregisterState. 
*/ @java.lang.Override public java.lang.String getDeregisterState() { java.lang.Object ref = deregisterState_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { deregisterState_ = s; } return s; } } /** * optional string deregisterState = 2; * @return The bytes for deregisterState. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getDeregisterStateBytes() { java.lang.Object ref = deregisterState_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); deregisterState_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int LASTHEARTBEATTIME_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object lastHeartBeatTime_ = ""; /** * optional string lastHeartBeatTime = 3; * @return Whether the lastHeartBeatTime field is set. */ @java.lang.Override public boolean hasLastHeartBeatTime() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string lastHeartBeatTime = 3; * @return The lastHeartBeatTime. */ @java.lang.Override public java.lang.String getLastHeartBeatTime() { java.lang.Object ref = lastHeartBeatTime_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { lastHeartBeatTime_ = s; } return s; } } /** * optional string lastHeartBeatTime = 3; * @return The bytes for lastHeartBeatTime. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getLastHeartBeatTimeBytes() { java.lang.Object ref = lastHeartBeatTime_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); lastHeartBeatTime_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int INFORMATION_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object information_ = ""; /** * optional string information = 4; * @return Whether the information field is set. */ @java.lang.Override public boolean hasInformation() { return ((bitField0_ & 0x00000008) != 0); } /** * optional string information = 4; * @return The information. */ @java.lang.Override public java.lang.String getInformation() { java.lang.Object ref = information_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { information_ = s; } return s; } } /** * optional string information = 4; * @return The bytes for information. 
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getInformationBytes() { java.lang.Object ref = information_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); information_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int SUBCLUSTERSTATE_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object subClusterState_ = ""; /** * optional string subClusterState = 5; * @return Whether the subClusterState field is set. */ @java.lang.Override public boolean hasSubClusterState() { return ((bitField0_ & 0x00000010) != 0); } /** * optional string subClusterState = 5; * @return The subClusterState. */ @java.lang.Override public java.lang.String getSubClusterState() { java.lang.Object ref = subClusterState_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { subClusterState_ = s; } return s; } } /** * optional string subClusterState = 5; * @return The bytes for subClusterState. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getSubClusterStateBytes() { java.lang.Object ref = subClusterState_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); subClusterState_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, subClusterId_); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, deregisterState_); } if (((bitField0_ & 0x00000004) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, lastHeartBeatTime_); } if (((bitField0_ & 0x00000008) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, information_); } if (((bitField0_ & 0x00000010) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, subClusterState_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, subClusterId_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, deregisterState_); } if (((bitField0_ & 0x00000004) != 0)) { size += 
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, lastHeartBeatTime_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, information_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, subClusterState_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto other =
          (org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto) obj;
      if (hasSubClusterId() != other.hasSubClusterId()) return false;
      if (hasSubClusterId()) {
        if (!getSubClusterId().equals(other.getSubClusterId())) return false;
      }
      if (hasDeregisterState() != other.hasDeregisterState()) return false;
      if (hasDeregisterState()) {
        if (!getDeregisterState().equals(other.getDeregisterState())) return false;
      }
      if (hasLastHeartBeatTime() != other.hasLastHeartBeatTime()) return false;
      if (hasLastHeartBeatTime()) {
        if (!getLastHeartBeatTime().equals(other.getLastHeartBeatTime())) return false;
      }
      if (hasInformation() != other.hasInformation()) return false;
      if (hasInformation()) {
        if (!getInformation().equals(other.getInformation())) return false;
      }
      if (hasSubClusterState() != other.hasSubClusterState()) return false;
      if (hasSubClusterState()) {
        if (!getSubClusterState().equals(other.getSubClusterState())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasSubClusterId()) {
        hash = (37 * hash) + SUBCLUSTERID_FIELD_NUMBER;
        hash = (53 * hash) + getSubClusterId().hashCode();
      }
      if (hasDeregisterState()) {
        hash = (37 * hash) + DEREGISTERSTATE_FIELD_NUMBER;
        hash = (53 * hash) + getDeregisterState().hashCode();
      }
      if (hasLastHeartBeatTime()) {
        hash = (37 * hash) + LASTHEARTBEATTIME_FIELD_NUMBER;
        hash = (53 * hash) + getLastHeartBeatTime().hashCode();
      }
      if (hasInformation()) {
        hash = (37 * hash) + INFORMATION_FIELD_NUMBER;
        hash = (53 * hash) + getInformation().hashCode();
      }
      if (hasSubClusterState()) {
        hash = (37 * hash) + SUBCLUSTERSTATE_FIELD_NUMBER;
        hash = (53 * hash) + getSubClusterState().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data) throws
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == 
      DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.DeregisterSubClustersProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.DeregisterSubClustersProto)
        org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_DeregisterSubClustersProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_DeregisterSubClustersProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.class, org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        subClusterId_ = "";
        deregisterState_ = "";
        lastHeartBeatTime_ = "";
        information_ = "";
        subClusterState_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_DeregisterSubClustersProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto result = new org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.subClusterId_ = subClusterId_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.deregisterState_ = deregisterState_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.lastHeartBeatTime_ = lastHeartBeatTime_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
result.information_ = information_; to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.subClusterState_ = subClusterState_; to_bitField0_ |= 0x00000010; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.getDefaultInstance()) return this; if (other.hasSubClusterId()) { subClusterId_ = other.subClusterId_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasDeregisterState()) { deregisterState_ = other.deregisterState_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasLastHeartBeatTime()) { lastHeartBeatTime_ = other.lastHeartBeatTime_; bitField0_ |= 0x00000004; onChanged(); } if (other.hasInformation()) { information_ = other.information_; bitField0_ |= 0x00000008; onChanged(); } if (other.hasSubClusterState()) { subClusterState_ = other.subClusterState_; bitField0_ |= 0x00000010; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { subClusterId_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { deregisterState_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { lastHeartBeatTime_ = input.readBytes(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { information_ = input.readBytes(); bitField0_ |= 0x00000008; break; } // case 34 case 42: { subClusterState_ = input.readBytes(); bitField0_ |= 0x00000010; break; } // case 42 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } 
break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object subClusterId_ = ""; /** * optional string subClusterId = 1; * @return Whether the subClusterId field is set. */ public boolean hasSubClusterId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string subClusterId = 1; * @return The subClusterId. */ public java.lang.String getSubClusterId() { java.lang.Object ref = subClusterId_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { subClusterId_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string subClusterId = 1; * @return The bytes for subClusterId. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getSubClusterIdBytes() { java.lang.Object ref = subClusterId_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); subClusterId_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string subClusterId = 1; * @param value The subClusterId to set. * @return This builder for chaining. */ public Builder setSubClusterId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } subClusterId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional string subClusterId = 1; * @return This builder for chaining. */ public Builder clearSubClusterId() { subClusterId_ = getDefaultInstance().getSubClusterId(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * optional string subClusterId = 1; * @param value The bytes for subClusterId to set. * @return This builder for chaining. */ public Builder setSubClusterIdBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } subClusterId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object deregisterState_ = ""; /** * optional string deregisterState = 2; * @return Whether the deregisterState field is set. */ public boolean hasDeregisterState() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string deregisterState = 2; * @return The deregisterState. */ public java.lang.String getDeregisterState() { java.lang.Object ref = deregisterState_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { deregisterState_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string deregisterState = 2; * @return The bytes for deregisterState. 
*/ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getDeregisterStateBytes() { java.lang.Object ref = deregisterState_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); deregisterState_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string deregisterState = 2; * @param value The deregisterState to set. * @return This builder for chaining. */ public Builder setDeregisterState( java.lang.String value) { if (value == null) { throw new NullPointerException(); } deregisterState_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional string deregisterState = 2; * @return This builder for chaining. */ public Builder clearDeregisterState() { deregisterState_ = getDefaultInstance().getDeregisterState(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * optional string deregisterState = 2; * @param value The bytes for deregisterState to set. * @return This builder for chaining. */ public Builder setDeregisterStateBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } deregisterState_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object lastHeartBeatTime_ = ""; /** * optional string lastHeartBeatTime = 3; * @return Whether the lastHeartBeatTime field is set. */ public boolean hasLastHeartBeatTime() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string lastHeartBeatTime = 3; * @return The lastHeartBeatTime. */ public java.lang.String getLastHeartBeatTime() { java.lang.Object ref = lastHeartBeatTime_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { lastHeartBeatTime_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string lastHeartBeatTime = 3; * @return The bytes for lastHeartBeatTime. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getLastHeartBeatTimeBytes() { java.lang.Object ref = lastHeartBeatTime_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); lastHeartBeatTime_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string lastHeartBeatTime = 3; * @param value The lastHeartBeatTime to set. * @return This builder for chaining. */ public Builder setLastHeartBeatTime( java.lang.String value) { if (value == null) { throw new NullPointerException(); } lastHeartBeatTime_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional string lastHeartBeatTime = 3; * @return This builder for chaining. */ public Builder clearLastHeartBeatTime() { lastHeartBeatTime_ = getDefaultInstance().getLastHeartBeatTime(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * optional string lastHeartBeatTime = 3; * @param value The bytes for lastHeartBeatTime to set. * @return This builder for chaining. 
*/ public Builder setLastHeartBeatTimeBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } lastHeartBeatTime_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object information_ = ""; /** * optional string information = 4; * @return Whether the information field is set. */ public boolean hasInformation() { return ((bitField0_ & 0x00000008) != 0); } /** * optional string information = 4; * @return The information. */ public java.lang.String getInformation() { java.lang.Object ref = information_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { information_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string information = 4; * @return The bytes for information. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getInformationBytes() { java.lang.Object ref = information_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); information_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string information = 4; * @param value The information to set. * @return This builder for chaining. */ public Builder setInformation( java.lang.String value) { if (value == null) { throw new NullPointerException(); } information_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional string information = 4; * @return This builder for chaining. */ public Builder clearInformation() { information_ = getDefaultInstance().getInformation(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * optional string information = 4; * @param value The bytes for information to set. * @return This builder for chaining. */ public Builder setInformationBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } information_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } private java.lang.Object subClusterState_ = ""; /** * optional string subClusterState = 5; * @return Whether the subClusterState field is set. */ public boolean hasSubClusterState() { return ((bitField0_ & 0x00000010) != 0); } /** * optional string subClusterState = 5; * @return The subClusterState. */ public java.lang.String getSubClusterState() { java.lang.Object ref = subClusterState_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { subClusterState_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string subClusterState = 5; * @return The bytes for subClusterState. 
      */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getSubClusterStateBytes() {
        java.lang.Object ref = subClusterState_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b =
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          subClusterState_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * optional string subClusterState = 5;
       * @param value The subClusterState to set.
       * @return This builder for chaining.
       */
      public Builder setSubClusterState(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        subClusterState_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * optional string subClusterState = 5;
       * @return This builder for chaining.
       */
      public Builder clearSubClusterState() {
        subClusterState_ = getDefaultInstance().getSubClusterState();
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
      /**
       * optional string subClusterState = 5;
       * @param value The bytes for subClusterState to set.
       * @return This builder for chaining.
       */
      public Builder setSubClusterStateBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        subClusterState_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }

      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.DeregisterSubClustersProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.DeregisterSubClustersProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DeregisterSubClustersProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DeregisterSubClustersProto>() {
      @java.lang.Override
      public DeregisterSubClustersProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<DeregisterSubClustersProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<DeregisterSubClustersProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface FederationQueueWeightProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.FederationQueueWeightProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional string routerWeight = 1;
     * @return Whether the routerWeight field is set.
     */
    boolean hasRouterWeight();
    /**
     * optional string routerWeight = 1;
     * @return The routerWeight.
     */
    java.lang.String getRouterWeight();
    /**
     * optional string routerWeight = 1;
     * @return The bytes for routerWeight.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getRouterWeightBytes();

    /**
     * optional string amrmWeight = 2;
     * @return Whether the amrmWeight field is set.
     */
    boolean hasAmrmWeight();
    /**
     * optional string amrmWeight = 2;
     * @return The amrmWeight.
     */
    java.lang.String getAmrmWeight();
    /**
     * optional string amrmWeight = 2;
     * @return The bytes for amrmWeight.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getAmrmWeightBytes();

    /**
     * optional string headRoomAlpha = 3;
     * @return Whether the headRoomAlpha field is set.
     */
    boolean hasHeadRoomAlpha();
    /**
     * optional string headRoomAlpha = 3;
     * @return The headRoomAlpha.
     */
    java.lang.String getHeadRoomAlpha();
    /**
     * optional string headRoomAlpha = 3;
     * @return The bytes for headRoomAlpha.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHeadRoomAlphaBytes();

    /**
     * optional string queue = 4;
     * @return Whether the queue field is set.
     */
    boolean hasQueue();
    /**
     * optional string queue = 4;
     * @return The queue.
     */
    java.lang.String getQueue();
    /**
     * optional string queue = 4;
     * @return The bytes for queue.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueBytes();

    /**
     * optional string policyManagerClassName = 5;
     * @return Whether the policyManagerClassName field is set.
     */
    boolean hasPolicyManagerClassName();
    /**
     * optional string policyManagerClassName = 5;
     * @return The policyManagerClassName.
     */
    java.lang.String getPolicyManagerClassName();
    /**
     * optional string policyManagerClassName = 5;
     * @return The bytes for policyManagerClassName.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getPolicyManagerClassNameBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.FederationQueueWeightProto}
   */
  public static final class FederationQueueWeightProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.FederationQueueWeightProto)
      FederationQueueWeightProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use FederationQueueWeightProto.newBuilder() to construct.
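    /*
     * Usage sketch (editorial comment, not part of the protoc output): both
     * DeregisterSubClustersProto above and FederationQueueWeightProto below are
     * plain optional-string messages, so they are built through their Builder
     * and round-tripped through PARSER. All field values here are hypothetical.
     *
     *   YarnProtos.DeregisterSubClustersProto msg =
     *       YarnProtos.DeregisterSubClustersProto.newBuilder()
     *           .setSubClusterId("SC-1")
     *           .setDeregisterState("SC_LOST")
     *           .setLastHeartBeatTime("2024-01-01 00:00:00")
     *           .setInformation("heartbeat expired")
     *           .setSubClusterState("SC_LOST")
     *           .build();
     *   byte[] bytes = msg.toByteArray();
     *   YarnProtos.DeregisterSubClustersProto copy =
     *       YarnProtos.DeregisterSubClustersProto.parseFrom(bytes);
     *   // hasXxx() reports presence of each optional field after parsing.
     *   assert copy.hasSubClusterId() && "SC-1".equals(copy.getSubClusterId());
     *
     * FederationQueueWeightProto follows the same pattern, e.g.
     *   FederationQueueWeightProto.newBuilder()
     *       .setQueue("root.a")
     *       .setRouterWeight("{\"SC-1\":1.0}")
     *       .setAmrmWeight("{\"SC-1\":1.0}")
     *       .setHeadRoomAlpha("1.0")
     *       .build();
     */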
private FederationQueueWeightProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private FederationQueueWeightProto() { routerWeight_ = ""; amrmWeight_ = ""; headRoomAlpha_ = ""; queue_ = ""; policyManagerClassName_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new FederationQueueWeightProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationQueueWeightProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationQueueWeightProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.class, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder.class); } private int bitField0_; public static final int ROUTERWEIGHT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object routerWeight_ = ""; /** * optional string routerWeight = 1; * @return Whether the routerWeight field is set. */ @java.lang.Override public boolean hasRouterWeight() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string routerWeight = 1; * @return The routerWeight. */ @java.lang.Override public java.lang.String getRouterWeight() { java.lang.Object ref = routerWeight_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { routerWeight_ = s; } return s; } } /** * optional string routerWeight = 1; * @return The bytes for routerWeight. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getRouterWeightBytes() { java.lang.Object ref = routerWeight_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); routerWeight_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int AMRMWEIGHT_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object amrmWeight_ = ""; /** * optional string amrmWeight = 2; * @return Whether the amrmWeight field is set. */ @java.lang.Override public boolean hasAmrmWeight() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string amrmWeight = 2; * @return The amrmWeight. 
*/ @java.lang.Override public java.lang.String getAmrmWeight() { java.lang.Object ref = amrmWeight_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { amrmWeight_ = s; } return s; } } /** * optional string amrmWeight = 2; * @return The bytes for amrmWeight. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getAmrmWeightBytes() { java.lang.Object ref = amrmWeight_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); amrmWeight_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int HEADROOMALPHA_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object headRoomAlpha_ = ""; /** * optional string headRoomAlpha = 3; * @return Whether the headRoomAlpha field is set. */ @java.lang.Override public boolean hasHeadRoomAlpha() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string headRoomAlpha = 3; * @return The headRoomAlpha. */ @java.lang.Override public java.lang.String getHeadRoomAlpha() { java.lang.Object ref = headRoomAlpha_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { headRoomAlpha_ = s; } return s; } } /** * optional string headRoomAlpha = 3; * @return The bytes for headRoomAlpha. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getHeadRoomAlphaBytes() { java.lang.Object ref = headRoomAlpha_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); headRoomAlpha_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int QUEUE_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object queue_ = ""; /** * optional string queue = 4; * @return Whether the queue field is set. */ @java.lang.Override public boolean hasQueue() { return ((bitField0_ & 0x00000008) != 0); } /** * optional string queue = 4; * @return The queue. */ @java.lang.Override public java.lang.String getQueue() { java.lang.Object ref = queue_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { queue_ = s; } return s; } } /** * optional string queue = 4; * @return The bytes for queue. 
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getQueueBytes() { java.lang.Object ref = queue_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); queue_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int POLICYMANAGERCLASSNAME_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object policyManagerClassName_ = ""; /** * optional string policyManagerClassName = 5; * @return Whether the policyManagerClassName field is set. */ @java.lang.Override public boolean hasPolicyManagerClassName() { return ((bitField0_ & 0x00000010) != 0); } /** * optional string policyManagerClassName = 5; * @return The policyManagerClassName. */ @java.lang.Override public java.lang.String getPolicyManagerClassName() { java.lang.Object ref = policyManagerClassName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { policyManagerClassName_ = s; } return s; } } /** * optional string policyManagerClassName = 5; * @return The bytes for policyManagerClassName. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getPolicyManagerClassNameBytes() { java.lang.Object ref = policyManagerClassName_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); policyManagerClassName_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, routerWeight_); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, amrmWeight_); } if (((bitField0_ & 0x00000004) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, headRoomAlpha_); } if (((bitField0_ & 0x00000008) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, queue_); } if (((bitField0_ & 0x00000010) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, policyManagerClassName_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, routerWeight_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, amrmWeight_); } if (((bitField0_ & 
          0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, headRoomAlpha_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, queue_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, policyManagerClassName_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto other =
          (org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto) obj;
      if (hasRouterWeight() != other.hasRouterWeight()) return false;
      if (hasRouterWeight()) {
        if (!getRouterWeight().equals(other.getRouterWeight())) return false;
      }
      if (hasAmrmWeight() != other.hasAmrmWeight()) return false;
      if (hasAmrmWeight()) {
        if (!getAmrmWeight().equals(other.getAmrmWeight())) return false;
      }
      if (hasHeadRoomAlpha() != other.hasHeadRoomAlpha()) return false;
      if (hasHeadRoomAlpha()) {
        if (!getHeadRoomAlpha().equals(other.getHeadRoomAlpha())) return false;
      }
      if (hasQueue() != other.hasQueue()) return false;
      if (hasQueue()) {
        if (!getQueue().equals(other.getQueue())) return false;
      }
      if (hasPolicyManagerClassName() != other.hasPolicyManagerClassName()) return false;
      if (hasPolicyManagerClassName()) {
        if (!getPolicyManagerClassName().equals(other.getPolicyManagerClassName())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasRouterWeight()) {
        hash = (37 * hash) + ROUTERWEIGHT_FIELD_NUMBER;
        hash = (53 * hash) + getRouterWeight().hashCode();
      }
      if (hasAmrmWeight()) {
        hash = (37 * hash) + AMRMWEIGHT_FIELD_NUMBER;
        hash = (53 * hash) + getAmrmWeight().hashCode();
      }
      if (hasHeadRoomAlpha()) {
        hash = (37 * hash) + HEADROOMALPHA_FIELD_NUMBER;
        hash = (53 * hash) + getHeadRoomAlpha().hashCode();
      }
      if (hasQueue()) {
        hash = (37 * hash) + QUEUE_FIELD_NUMBER;
        hash = (53 * hash) + getQueue().hashCode();
      }
      if (hasPolicyManagerClassName()) {
        hash = (37 * hash) + POLICYMANAGERCLASSNAME_FIELD_NUMBER;
        hash = (53 * hash) + getPolicyManagerClassName().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException
{ return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
      new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.FederationQueueWeightProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.FederationQueueWeightProto)
        org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationQueueWeightProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationQueueWeightProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.class, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        routerWeight_ = "";
        amrmWeight_ = "";
        headRoomAlpha_ = "";
        queue_ = "";
        policyManagerClassName_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationQueueWeightProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto result = new org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.routerWeight_ = routerWeight_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.amrmWeight_ = amrmWeight_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.headRoomAlpha_ = headRoomAlpha_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.queue_ = queue_;
          to_bitField0_ |= 0x00000008;
} if (((from_bitField0_ & 0x00000010) != 0)) { result.policyManagerClassName_ = policyManagerClassName_; to_bitField0_ |= 0x00000010; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.getDefaultInstance()) return this; if (other.hasRouterWeight()) { routerWeight_ = other.routerWeight_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasAmrmWeight()) { amrmWeight_ = other.amrmWeight_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasHeadRoomAlpha()) { headRoomAlpha_ = other.headRoomAlpha_; bitField0_ |= 0x00000004; onChanged(); } if (other.hasQueue()) { queue_ = other.queue_; bitField0_ |= 0x00000008; onChanged(); } if (other.hasPolicyManagerClassName()) { policyManagerClassName_ = other.policyManagerClassName_; bitField0_ |= 0x00000010; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { routerWeight_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { amrmWeight_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { headRoomAlpha_ = input.readBytes(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { queue_ = input.readBytes(); bitField0_ |= 0x00000008; break; } // case 34 case 42: { policyManagerClassName_ = input.readBytes(); bitField0_ |= 0x00000010; break; } // case 42 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch 
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object routerWeight_ = ""; /** * optional string routerWeight = 1; * @return Whether the routerWeight field is set. */ public boolean hasRouterWeight() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string routerWeight = 1; * @return The routerWeight. */ public java.lang.String getRouterWeight() { java.lang.Object ref = routerWeight_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { routerWeight_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string routerWeight = 1; * @return The bytes for routerWeight. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getRouterWeightBytes() { java.lang.Object ref = routerWeight_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); routerWeight_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string routerWeight = 1; * @param value The routerWeight to set. * @return This builder for chaining. */ public Builder setRouterWeight( java.lang.String value) { if (value == null) { throw new NullPointerException(); } routerWeight_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional string routerWeight = 1; * @return This builder for chaining. */ public Builder clearRouterWeight() { routerWeight_ = getDefaultInstance().getRouterWeight(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * optional string routerWeight = 1; * @param value The bytes for routerWeight to set. * @return This builder for chaining. */ public Builder setRouterWeightBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } routerWeight_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object amrmWeight_ = ""; /** * optional string amrmWeight = 2; * @return Whether the amrmWeight field is set. */ public boolean hasAmrmWeight() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string amrmWeight = 2; * @return The amrmWeight. */ public java.lang.String getAmrmWeight() { java.lang.Object ref = amrmWeight_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { amrmWeight_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string amrmWeight = 2; * @return The bytes for amrmWeight. 
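   *
   * (Illustrative note, not part of the generated file: in YARN Federation
   * the router/amrm weights are strings carrying per-subcluster weights;
   * the "SC-1:0.7,SC-2:0.3" style below mirrors the routeradmin policy
   * syntax and is an assumption here, as are all field values.)
   * <pre>
   *   YarnProtos.FederationQueueWeightProto w =
   *       YarnProtos.FederationQueueWeightProto.newBuilder()
   *           .setQueue("root.a")
   *           .setRouterWeight("SC-1:0.7,SC-2:0.3")
   *           .setAmrmWeight("SC-1:0.6,SC-2:0.4")
   *           .setHeadRoomAlpha("1.0")
   *           .build();
   * </pre>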
*/ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getAmrmWeightBytes() { java.lang.Object ref = amrmWeight_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); amrmWeight_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string amrmWeight = 2; * @param value The amrmWeight to set. * @return This builder for chaining. */ public Builder setAmrmWeight( java.lang.String value) { if (value == null) { throw new NullPointerException(); } amrmWeight_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional string amrmWeight = 2; * @return This builder for chaining. */ public Builder clearAmrmWeight() { amrmWeight_ = getDefaultInstance().getAmrmWeight(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * optional string amrmWeight = 2; * @param value The bytes for amrmWeight to set. * @return This builder for chaining. */ public Builder setAmrmWeightBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } amrmWeight_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object headRoomAlpha_ = ""; /** * optional string headRoomAlpha = 3; * @return Whether the headRoomAlpha field is set. */ public boolean hasHeadRoomAlpha() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string headRoomAlpha = 3; * @return The headRoomAlpha. */ public java.lang.String getHeadRoomAlpha() { java.lang.Object ref = headRoomAlpha_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { headRoomAlpha_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string headRoomAlpha = 3; * @return The bytes for headRoomAlpha. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getHeadRoomAlphaBytes() { java.lang.Object ref = headRoomAlpha_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); headRoomAlpha_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string headRoomAlpha = 3; * @param value The headRoomAlpha to set. * @return This builder for chaining. */ public Builder setHeadRoomAlpha( java.lang.String value) { if (value == null) { throw new NullPointerException(); } headRoomAlpha_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional string headRoomAlpha = 3; * @return This builder for chaining. */ public Builder clearHeadRoomAlpha() { headRoomAlpha_ = getDefaultInstance().getHeadRoomAlpha(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * optional string headRoomAlpha = 3; * @param value The bytes for headRoomAlpha to set. * @return This builder for chaining. 
*/ public Builder setHeadRoomAlphaBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } headRoomAlpha_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object queue_ = ""; /** * optional string queue = 4; * @return Whether the queue field is set. */ public boolean hasQueue() { return ((bitField0_ & 0x00000008) != 0); } /** * optional string queue = 4; * @return The queue. */ public java.lang.String getQueue() { java.lang.Object ref = queue_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { queue_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string queue = 4; * @return The bytes for queue. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getQueueBytes() { java.lang.Object ref = queue_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); queue_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string queue = 4; * @param value The queue to set. * @return This builder for chaining. */ public Builder setQueue( java.lang.String value) { if (value == null) { throw new NullPointerException(); } queue_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional string queue = 4; * @return This builder for chaining. */ public Builder clearQueue() { queue_ = getDefaultInstance().getQueue(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * optional string queue = 4; * @param value The bytes for queue to set. * @return This builder for chaining. */ public Builder setQueueBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } queue_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } private java.lang.Object policyManagerClassName_ = ""; /** * optional string policyManagerClassName = 5; * @return Whether the policyManagerClassName field is set. */ public boolean hasPolicyManagerClassName() { return ((bitField0_ & 0x00000010) != 0); } /** * optional string policyManagerClassName = 5; * @return The policyManagerClassName. */ public java.lang.String getPolicyManagerClassName() { java.lang.Object ref = policyManagerClassName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { policyManagerClassName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string policyManagerClassName = 5; * @return The bytes for policyManagerClassName. 
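   *
   * (Illustrative note, not part of the generated file: this field typically
   * names a FederationPolicyManager implementation, for example
   * "org.apache.hadoop.yarn.server.federation.policies.manager.WeightedLocalityPolicyManager".)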
*/ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getPolicyManagerClassNameBytes() { java.lang.Object ref = policyManagerClassName_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); policyManagerClassName_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string policyManagerClassName = 5; * @param value The policyManagerClassName to set. * @return This builder for chaining. */ public Builder setPolicyManagerClassName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } policyManagerClassName_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional string policyManagerClassName = 5; * @return This builder for chaining. */ public Builder clearPolicyManagerClassName() { policyManagerClassName_ = getDefaultInstance().getPolicyManagerClassName(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); return this; } /** * optional string policyManagerClassName = 5; * @param value The bytes for policyManagerClassName to set. * @return This builder for chaining. */ public Builder setPolicyManagerClassNameBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } policyManagerClassName_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.FederationQueueWeightProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.FederationQueueWeightProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public FederationQueueWeightProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) 
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface FederationSubClusterProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.FederationSubClusterProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional string subClusterId = 1; * @return Whether the subClusterId field is set. */ boolean hasSubClusterId(); /** * optional string subClusterId = 1; * @return The subClusterId. */ java.lang.String getSubClusterId(); /** * optional string subClusterId = 1; * @return The bytes for subClusterId. */ org.apache.hadoop.thirdparty.protobuf.ByteString getSubClusterIdBytes(); /** * optional string lastHeartBeatTime = 2; * @return Whether the lastHeartBeatTime field is set. */ boolean hasLastHeartBeatTime(); /** * optional string lastHeartBeatTime = 2; * @return The lastHeartBeatTime. */ java.lang.String getLastHeartBeatTime(); /** * optional string lastHeartBeatTime = 2; * @return The bytes for lastHeartBeatTime. */ org.apache.hadoop.thirdparty.protobuf.ByteString getLastHeartBeatTimeBytes(); /** * optional string subClusterState = 3; * @return Whether the subClusterState field is set. */ boolean hasSubClusterState(); /** * optional string subClusterState = 3; * @return The subClusterState. */ java.lang.String getSubClusterState(); /** * optional string subClusterState = 3; * @return The bytes for subClusterState. */ org.apache.hadoop.thirdparty.protobuf.ByteString getSubClusterStateBytes(); } /** * Protobuf type {@code hadoop.yarn.FederationSubClusterProto} */ public static final class FederationSubClusterProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.FederationSubClusterProto) FederationSubClusterProtoOrBuilder { private static final long serialVersionUID = 0L; // Use FederationSubClusterProto.newBuilder() to construct. 
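  // Illustrative sketch, not part of the generated file: instances are
  // assembled through the Builder and round-trip with the standard protobuf
  // calls. All field values below are hypothetical.
  //
  //   YarnProtos.FederationSubClusterProto sc =
  //       YarnProtos.FederationSubClusterProto.newBuilder()
  //           .setSubClusterId("SC-1")
  //           .setLastHeartBeatTime("1700000000000")
  //           .setSubClusterState("SC_RUNNING")
  //           .build();
  //   byte[] wire = sc.toByteArray();
  //   YarnProtos.FederationSubClusterProto parsed =
  //       YarnProtos.FederationSubClusterProto.parseFrom(wire);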
private FederationSubClusterProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private FederationSubClusterProto() { subClusterId_ = ""; lastHeartBeatTime_ = ""; subClusterState_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new FederationSubClusterProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationSubClusterProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationSubClusterProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto.class, org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto.Builder.class); } private int bitField0_; public static final int SUBCLUSTERID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object subClusterId_ = ""; /** * optional string subClusterId = 1; * @return Whether the subClusterId field is set. */ @java.lang.Override public boolean hasSubClusterId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string subClusterId = 1; * @return The subClusterId. */ @java.lang.Override public java.lang.String getSubClusterId() { java.lang.Object ref = subClusterId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { subClusterId_ = s; } return s; } } /** * optional string subClusterId = 1; * @return The bytes for subClusterId. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getSubClusterIdBytes() { java.lang.Object ref = subClusterId_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); subClusterId_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int LASTHEARTBEATTIME_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object lastHeartBeatTime_ = ""; /** * optional string lastHeartBeatTime = 2; * @return Whether the lastHeartBeatTime field is set. */ @java.lang.Override public boolean hasLastHeartBeatTime() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string lastHeartBeatTime = 2; * @return The lastHeartBeatTime. 
*/ @java.lang.Override public java.lang.String getLastHeartBeatTime() { java.lang.Object ref = lastHeartBeatTime_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { lastHeartBeatTime_ = s; } return s; } } /** * optional string lastHeartBeatTime = 2; * @return The bytes for lastHeartBeatTime. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getLastHeartBeatTimeBytes() { java.lang.Object ref = lastHeartBeatTime_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); lastHeartBeatTime_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int SUBCLUSTERSTATE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object subClusterState_ = ""; /** * optional string subClusterState = 3; * @return Whether the subClusterState field is set. */ @java.lang.Override public boolean hasSubClusterState() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string subClusterState = 3; * @return The subClusterState. */ @java.lang.Override public java.lang.String getSubClusterState() { java.lang.Object ref = subClusterState_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { subClusterState_ = s; } return s; } } /** * optional string subClusterState = 3; * @return The bytes for subClusterState. 
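   *
   * (Illustrative note, not part of the generated file: the value mirrors the
   * federation SubClusterState enum names, e.g. "SC_NEW", "SC_RUNNING",
   * "SC_UNHEALTHY", "SC_LOST", "SC_DECOMMISSIONED"; the proto stores it as a
   * plain string.)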
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getSubClusterStateBytes() { java.lang.Object ref = subClusterState_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); subClusterState_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, subClusterId_); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, lastHeartBeatTime_); } if (((bitField0_ & 0x00000004) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, subClusterState_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, subClusterId_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, lastHeartBeatTime_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, subClusterState_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto) obj; if (hasSubClusterId() != other.hasSubClusterId()) return false; if (hasSubClusterId()) { if (!getSubClusterId() .equals(other.getSubClusterId())) return false; } if (hasLastHeartBeatTime() != other.hasLastHeartBeatTime()) return false; if (hasLastHeartBeatTime()) { if (!getLastHeartBeatTime() .equals(other.getLastHeartBeatTime())) return false; } if (hasSubClusterState() != other.hasSubClusterState()) return false; if (hasSubClusterState()) { if (!getSubClusterState() .equals(other.getSubClusterState())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasSubClusterId()) { hash = (37 * hash) + SUBCLUSTERID_FIELD_NUMBER; hash = (53 * hash) + getSubClusterId().hashCode(); } if (hasLastHeartBeatTime()) { hash = (37 * hash) + LASTHEARTBEATTIME_FIELD_NUMBER; hash = (53 * hash) + getLastHeartBeatTime().hashCode(); } if (hasSubClusterState()) { hash = (37 * hash) + SUBCLUSTERSTATE_FIELD_NUMBER; hash = (53 * hash) + 
getSubClusterState().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.FederationSubClusterProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.FederationSubClusterProto) org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationSubClusterProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationSubClusterProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto.class, org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; subClusterId_ = ""; lastHeartBeatTime_ = ""; subClusterState_ = ""; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationSubClusterProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto build() { org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto result = new org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.subClusterId_ = subClusterId_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.lastHeartBeatTime_ = lastHeartBeatTime_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.subClusterState_ = subClusterState_; to_bitField0_ |= 0x00000004; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto.getDefaultInstance()) return this; if (other.hasSubClusterId()) { subClusterId_ = other.subClusterId_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasLastHeartBeatTime()) { lastHeartBeatTime_ = other.lastHeartBeatTime_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasSubClusterState()) { subClusterState_ = other.subClusterState_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { subClusterId_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { lastHeartBeatTime_ = 
input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { subClusterState_ = input.readBytes(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object subClusterId_ = ""; /** * optional string subClusterId = 1; * @return Whether the subClusterId field is set. */ public boolean hasSubClusterId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string subClusterId = 1; * @return The subClusterId. */ public java.lang.String getSubClusterId() { java.lang.Object ref = subClusterId_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { subClusterId_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string subClusterId = 1; * @return The bytes for subClusterId. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getSubClusterIdBytes() { java.lang.Object ref = subClusterId_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); subClusterId_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string subClusterId = 1; * @param value The subClusterId to set. * @return This builder for chaining. */ public Builder setSubClusterId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } subClusterId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional string subClusterId = 1; * @return This builder for chaining. */ public Builder clearSubClusterId() { subClusterId_ = getDefaultInstance().getSubClusterId(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * optional string subClusterId = 1; * @param value The bytes for subClusterId to set. * @return This builder for chaining. */ public Builder setSubClusterIdBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } subClusterId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object lastHeartBeatTime_ = ""; /** * optional string lastHeartBeatTime = 2; * @return Whether the lastHeartBeatTime field is set. */ public boolean hasLastHeartBeatTime() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string lastHeartBeatTime = 2; * @return The lastHeartBeatTime. */ public java.lang.String getLastHeartBeatTime() { java.lang.Object ref = lastHeartBeatTime_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { lastHeartBeatTime_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string lastHeartBeatTime = 2; * @return The bytes for lastHeartBeatTime. 
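   *
   * (Note, added for clarity: protobuf string fields are held internally as
   * either a java.lang.String or a ByteString; this accessor converts the
   * String form to a UTF-8 ByteString once and caches it back into the
   * field, as the body below shows.)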
*/ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getLastHeartBeatTimeBytes() { java.lang.Object ref = lastHeartBeatTime_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); lastHeartBeatTime_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string lastHeartBeatTime = 2; * @param value The lastHeartBeatTime to set. * @return This builder for chaining. */ public Builder setLastHeartBeatTime( java.lang.String value) { if (value == null) { throw new NullPointerException(); } lastHeartBeatTime_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional string lastHeartBeatTime = 2; * @return This builder for chaining. */ public Builder clearLastHeartBeatTime() { lastHeartBeatTime_ = getDefaultInstance().getLastHeartBeatTime(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * optional string lastHeartBeatTime = 2; * @param value The bytes for lastHeartBeatTime to set. * @return This builder for chaining. */ public Builder setLastHeartBeatTimeBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } lastHeartBeatTime_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object subClusterState_ = ""; /** * optional string subClusterState = 3; * @return Whether the subClusterState field is set. */ public boolean hasSubClusterState() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string subClusterState = 3; * @return The subClusterState. */ public java.lang.String getSubClusterState() { java.lang.Object ref = subClusterState_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { subClusterState_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string subClusterState = 3; * @return The bytes for subClusterState. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getSubClusterStateBytes() { java.lang.Object ref = subClusterState_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); subClusterState_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string subClusterState = 3; * @param value The subClusterState to set. * @return This builder for chaining. */ public Builder setSubClusterState( java.lang.String value) { if (value == null) { throw new NullPointerException(); } subClusterState_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional string subClusterState = 3; * @return This builder for chaining. */ public Builder clearSubClusterState() { subClusterState_ = getDefaultInstance().getSubClusterState(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * optional string subClusterState = 3; * @param value The bytes for subClusterState to set. * @return This builder for chaining. 
*/ public Builder setSubClusterStateBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } subClusterState_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.FederationSubClusterProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.FederationSubClusterProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public FederationSubClusterProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ResourceRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.PriorityProto priority = 1; * @return Whether the priority field is set. */ boolean hasPriority(); /** * optional .hadoop.yarn.PriorityProto priority = 1; * @return The priority. */ org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority(); /** * optional .hadoop.yarn.PriorityProto priority = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder(); /** * optional string resource_name = 2; * @return Whether the resourceName field is set. */ boolean hasResourceName(); /** * optional string resource_name = 2; * @return The resourceName. 
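   *
   * (Note, added for clarity: per YARN's ResourceRequest contract this is a
   * host name, a rack name, or the special value "*" meaning any location.)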
*/ java.lang.String getResourceName(); /** * optional string resource_name = 2; * @return The bytes for resourceName. */ org.apache.hadoop.thirdparty.protobuf.ByteString getResourceNameBytes(); /** * optional .hadoop.yarn.ResourceProto capability = 3; * @return Whether the capability field is set. */ boolean hasCapability(); /** * optional .hadoop.yarn.ResourceProto capability = 3; * @return The capability. */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability(); /** * optional .hadoop.yarn.ResourceProto capability = 3; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder(); /** * optional int32 num_containers = 4; * @return Whether the numContainers field is set. */ boolean hasNumContainers(); /** * optional int32 num_containers = 4; * @return The numContainers. */ int getNumContainers(); /** * optional bool relax_locality = 5 [default = true]; * @return Whether the relaxLocality field is set. */ boolean hasRelaxLocality(); /** * optional bool relax_locality = 5 [default = true]; * @return The relaxLocality. */ boolean getRelaxLocality(); /** * optional string node_label_expression = 6; * @return Whether the nodeLabelExpression field is set. */ boolean hasNodeLabelExpression(); /** * optional string node_label_expression = 6; * @return The nodeLabelExpression. */ java.lang.String getNodeLabelExpression(); /** * optional string node_label_expression = 6; * @return The bytes for nodeLabelExpression. */ org.apache.hadoop.thirdparty.protobuf.ByteString getNodeLabelExpressionBytes(); /** * optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7; * @return Whether the executionTypeRequest field is set. */ boolean hasExecutionTypeRequest(); /** * optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7; * @return The executionTypeRequest. */ org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getExecutionTypeRequest(); /** * optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7; */ org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder getExecutionTypeRequestOrBuilder(); /** * optional int64 allocation_request_id = 8 [default = -1]; * @return Whether the allocationRequestId field is set. */ boolean hasAllocationRequestId(); /** * optional int64 allocation_request_id = 8 [default = -1]; * @return The allocationRequestId. */ long getAllocationRequestId(); } /** *
   * <pre>
   * ////////////////////////////////////////////////////////////////////////
   * //// From AM_RM_Protocol /////////////////////////////////////////////
   * ////////////////////////////////////////////////////////////////////////
   * </pre>
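   *
   * (Illustrative build, not part of the generated file; field values and
   * the nested PriorityProto/ResourceProto setters are assumptions based on
   * the corresponding messages elsewhere in this file.)
   * <pre>
   *   YarnProtos.ResourceRequestProto req =
   *       YarnProtos.ResourceRequestProto.newBuilder()
   *           .setPriority(YarnProtos.PriorityProto.newBuilder().setPriority(1))
   *           .setResourceName("*")
   *           .setCapability(YarnProtos.ResourceProto.newBuilder()
   *               .setMemory(1024).setVirtualCores(1))
   *           .setNumContainers(2)
   *           .build();
   * </pre>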
* * Protobuf type {@code hadoop.yarn.ResourceRequestProto} */ public static final class ResourceRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceRequestProto) ResourceRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ResourceRequestProto.newBuilder() to construct. private ResourceRequestProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ResourceRequestProto() { resourceName_ = ""; relaxLocality_ = true; nodeLabelExpression_ = ""; allocationRequestId_ = -1L; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ResourceRequestProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder.class); } private int bitField0_; public static final int PRIORITY_FIELD_NUMBER = 1; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_; /** * optional .hadoop.yarn.PriorityProto priority = 1; * @return Whether the priority field is set. */ @java.lang.Override public boolean hasPriority() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.PriorityProto priority = 1; * @return The priority. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() { return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } /** * optional .hadoop.yarn.PriorityProto priority = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() { return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } public static final int RESOURCE_NAME_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object resourceName_ = ""; /** * optional string resource_name = 2; * @return Whether the resourceName field is set. */ @java.lang.Override public boolean hasResourceName() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string resource_name = 2; * @return The resourceName. 
*/ @java.lang.Override public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { resourceName_ = s; } return s; } } /** * optional string resource_name = 2; * @return The bytes for resourceName. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int CAPABILITY_FIELD_NUMBER = 3; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_; /** * optional .hadoop.yarn.ResourceProto capability = 3; * @return Whether the capability field is set. */ @java.lang.Override public boolean hasCapability() { return ((bitField0_ & 0x00000004) != 0); } /** * optional .hadoop.yarn.ResourceProto capability = 3; * @return The capability. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() { return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } /** * optional .hadoop.yarn.ResourceProto capability = 3; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() { return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } public static final int NUM_CONTAINERS_FIELD_NUMBER = 4; private int numContainers_ = 0; /** * optional int32 num_containers = 4; * @return Whether the numContainers field is set. */ @java.lang.Override public boolean hasNumContainers() { return ((bitField0_ & 0x00000008) != 0); } /** * optional int32 num_containers = 4; * @return The numContainers. */ @java.lang.Override public int getNumContainers() { return numContainers_; } public static final int RELAX_LOCALITY_FIELD_NUMBER = 5; private boolean relaxLocality_ = true; /** * optional bool relax_locality = 5 [default = true]; * @return Whether the relaxLocality field is set. */ @java.lang.Override public boolean hasRelaxLocality() { return ((bitField0_ & 0x00000010) != 0); } /** * optional bool relax_locality = 5 [default = true]; * @return The relaxLocality. */ @java.lang.Override public boolean getRelaxLocality() { return relaxLocality_; } public static final int NODE_LABEL_EXPRESSION_FIELD_NUMBER = 6; @SuppressWarnings("serial") private volatile java.lang.Object nodeLabelExpression_ = ""; /** * optional string node_label_expression = 6; * @return Whether the nodeLabelExpression field is set. */ @java.lang.Override public boolean hasNodeLabelExpression() { return ((bitField0_ & 0x00000020) != 0); } /** * optional string node_label_expression = 6; * @return The nodeLabelExpression. 
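   *
   * (Note, added for clarity: a node label expression constrains placement
   * to nodes carrying the given label, e.g. "gpu"; an empty value falls back
   * to the application- or queue-level default.)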
*/ @java.lang.Override public java.lang.String getNodeLabelExpression() { java.lang.Object ref = nodeLabelExpression_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { nodeLabelExpression_ = s; } return s; } } /** * optional string node_label_expression = 6; * @return The bytes for nodeLabelExpression. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNodeLabelExpressionBytes() { java.lang.Object ref = nodeLabelExpression_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); nodeLabelExpression_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int EXECUTION_TYPE_REQUEST_FIELD_NUMBER = 7; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto executionTypeRequest_; /** * optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7; * @return Whether the executionTypeRequest field is set. */ @java.lang.Override public boolean hasExecutionTypeRequest() { return ((bitField0_ & 0x00000040) != 0); } /** * optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7; * @return The executionTypeRequest. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getExecutionTypeRequest() { return executionTypeRequest_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance() : executionTypeRequest_; } /** * optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder getExecutionTypeRequestOrBuilder() { return executionTypeRequest_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance() : executionTypeRequest_; } public static final int ALLOCATION_REQUEST_ID_FIELD_NUMBER = 8; private long allocationRequestId_ = -1L; /** * optional int64 allocation_request_id = 8 [default = -1]; * @return Whether the allocationRequestId field is set. */ @java.lang.Override public boolean hasAllocationRequestId() { return ((bitField0_ & 0x00000080) != 0); } /** * optional int64 allocation_request_id = 8 [default = -1]; * @return The allocationRequestId. 
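   *
   * (Note, added for clarity: applications may tag a request with an id so
   * allocated containers can be correlated back to it via the container's
   * allocation request id; the default of -1 means no id was set.)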
*/ @java.lang.Override public long getAllocationRequestId() { return allocationRequestId_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasCapability()) { if (!getCapability().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getPriority()); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, resourceName_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeMessage(3, getCapability()); } if (((bitField0_ & 0x00000008) != 0)) { output.writeInt32(4, numContainers_); } if (((bitField0_ & 0x00000010) != 0)) { output.writeBool(5, relaxLocality_); } if (((bitField0_ & 0x00000020) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 6, nodeLabelExpression_); } if (((bitField0_ & 0x00000040) != 0)) { output.writeMessage(7, getExecutionTypeRequest()); } if (((bitField0_ & 0x00000080) != 0)) { output.writeInt64(8, allocationRequestId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getPriority()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, resourceName_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(3, getCapability()); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(4, numContainers_); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(5, relaxLocality_); } if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(6, nodeLabelExpression_); } if (((bitField0_ & 0x00000040) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(7, getExecutionTypeRequest()); } if (((bitField0_ & 0x00000080) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(8, allocationRequestId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto) obj; if (hasPriority() != other.hasPriority()) return false; if (hasPriority()) { if (!getPriority() .equals(other.getPriority())) return false; } if (hasResourceName() != other.hasResourceName()) return false; if (hasResourceName()) { if (!getResourceName() .equals(other.getResourceName())) return false; } if (hasCapability() != 
other.hasCapability()) return false; if (hasCapability()) { if (!getCapability() .equals(other.getCapability())) return false; } if (hasNumContainers() != other.hasNumContainers()) return false; if (hasNumContainers()) { if (getNumContainers() != other.getNumContainers()) return false; } if (hasRelaxLocality() != other.hasRelaxLocality()) return false; if (hasRelaxLocality()) { if (getRelaxLocality() != other.getRelaxLocality()) return false; } if (hasNodeLabelExpression() != other.hasNodeLabelExpression()) return false; if (hasNodeLabelExpression()) { if (!getNodeLabelExpression() .equals(other.getNodeLabelExpression())) return false; } if (hasExecutionTypeRequest() != other.hasExecutionTypeRequest()) return false; if (hasExecutionTypeRequest()) { if (!getExecutionTypeRequest() .equals(other.getExecutionTypeRequest())) return false; } if (hasAllocationRequestId() != other.hasAllocationRequestId()) return false; if (hasAllocationRequestId()) { if (getAllocationRequestId() != other.getAllocationRequestId()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasPriority()) { hash = (37 * hash) + PRIORITY_FIELD_NUMBER; hash = (53 * hash) + getPriority().hashCode(); } if (hasResourceName()) { hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getResourceName().hashCode(); } if (hasCapability()) { hash = (37 * hash) + CAPABILITY_FIELD_NUMBER; hash = (53 * hash) + getCapability().hashCode(); } if (hasNumContainers()) { hash = (37 * hash) + NUM_CONTAINERS_FIELD_NUMBER; hash = (53 * hash) + getNumContainers(); } if (hasRelaxLocality()) { hash = (37 * hash) + RELAX_LOCALITY_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getRelaxLocality()); } if (hasNodeLabelExpression()) { hash = (37 * hash) + NODE_LABEL_EXPRESSION_FIELD_NUMBER; hash = (53 * hash) + getNodeLabelExpression().hashCode(); } if (hasExecutionTypeRequest()) { hash = (37 * hash) + EXECUTION_TYPE_REQUEST_FIELD_NUMBER; hash = (53 * hash) + getExecutionTypeRequest().hashCode(); } if (hasAllocationRequestId()) { hash = (37 * hash) + ALLOCATION_REQUEST_ID_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getAllocationRequestId()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom( 
org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** *
     * //////////////////////////////////////////////////////////////////////
     * //// From AM_RM_Protocol /////////////////////////////////////////////
     * //////////////////////////////////////////////////////////////////////
     * 
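     * <p>Typical builder usage, as a minimal sketch (field values are
     * illustrative only; every setter shown is defined in this class):
     * <pre>
     *   ResourceRequestProto req = ResourceRequestProto.newBuilder()
     *       .setResourceName("*")          // any host or rack
     *       .setNumContainers(1)
     *       .build();                      // throws if a set capability
     *                                      // sub-message is uninitialized
     * </pre>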
* * Protobuf type {@code hadoop.yarn.ResourceRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceRequestProto) org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getPriorityFieldBuilder(); getCapabilityFieldBuilder(); getExecutionTypeRequestFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; priority_ = null; if (priorityBuilder_ != null) { priorityBuilder_.dispose(); priorityBuilder_ = null; } resourceName_ = ""; capability_ = null; if (capabilityBuilder_ != null) { capabilityBuilder_.dispose(); capabilityBuilder_ = null; } numContainers_ = 0; relaxLocality_ = true; nodeLabelExpression_ = ""; executionTypeRequest_ = null; if (executionTypeRequestBuilder_ != null) { executionTypeRequestBuilder_.dispose(); executionTypeRequestBuilder_ = null; } allocationRequestId_ = -1L; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.priority_ = 
priorityBuilder_ == null ? priority_ : priorityBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.resourceName_ = resourceName_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.capability_ = capabilityBuilder_ == null ? capability_ : capabilityBuilder_.build(); to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.numContainers_ = numContainers_; to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.relaxLocality_ = relaxLocality_; to_bitField0_ |= 0x00000010; } if (((from_bitField0_ & 0x00000020) != 0)) { result.nodeLabelExpression_ = nodeLabelExpression_; to_bitField0_ |= 0x00000020; } if (((from_bitField0_ & 0x00000040) != 0)) { result.executionTypeRequest_ = executionTypeRequestBuilder_ == null ? executionTypeRequest_ : executionTypeRequestBuilder_.build(); to_bitField0_ |= 0x00000040; } if (((from_bitField0_ & 0x00000080) != 0)) { result.allocationRequestId_ = allocationRequestId_; to_bitField0_ |= 0x00000080; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance()) return this; if (other.hasPriority()) { mergePriority(other.getPriority()); } if (other.hasResourceName()) { resourceName_ = other.resourceName_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasCapability()) { mergeCapability(other.getCapability()); } if (other.hasNumContainers()) { setNumContainers(other.getNumContainers()); } if (other.hasRelaxLocality()) { setRelaxLocality(other.getRelaxLocality()); } if (other.hasNodeLabelExpression()) { nodeLabelExpression_ = other.nodeLabelExpression_; bitField0_ |= 0x00000020; onChanged(); } if (other.hasExecutionTypeRequest()) { mergeExecutionTypeRequest(other.getExecutionTypeRequest()); } if (other.hasAllocationRequestId()) { setAllocationRequestId(other.getAllocationRequestId()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean 
isInitialized() { if (hasCapability()) { if (!getCapability().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getPriorityFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { resourceName_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage( getCapabilityFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 case 32: { numContainers_ = input.readInt32(); bitField0_ |= 0x00000008; break; } // case 32 case 40: { relaxLocality_ = input.readBool(); bitField0_ |= 0x00000010; break; } // case 40 case 50: { nodeLabelExpression_ = input.readBytes(); bitField0_ |= 0x00000020; break; } // case 50 case 58: { input.readMessage( getExecutionTypeRequestFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000040; break; } // case 58 case 64: { allocationRequestId_ = input.readInt64(); bitField0_ |= 0x00000080; break; } // case 64 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_; /** * optional .hadoop.yarn.PriorityProto priority = 1; * @return Whether the priority field is set. */ public boolean hasPriority() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.PriorityProto priority = 1; * @return The priority. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() { if (priorityBuilder_ == null) { return priority_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } else { return priorityBuilder_.getMessage(); } } /** * optional .hadoop.yarn.PriorityProto priority = 1; */ public Builder setPriority(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) { if (priorityBuilder_ == null) { if (value == null) { throw new NullPointerException(); } priority_ = value; } else { priorityBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 1; */ public Builder setPriority( org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) { if (priorityBuilder_ == null) { priority_ = builderForValue.build(); } else { priorityBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 1; */ public Builder mergePriority(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) { if (priorityBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && priority_ != null && priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) { getPriorityBuilder().mergeFrom(value); } else { priority_ = value; } } else { priorityBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 1; */ public Builder clearPriority() { bitField0_ = (bitField0_ & ~0x00000001); priority_ = null; if (priorityBuilder_ != null) { priorityBuilder_.dispose(); priorityBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() { bitField0_ |= 0x00000001; onChanged(); return getPriorityFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.PriorityProto priority = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() { if (priorityBuilder_ != null) { return priorityBuilder_.getMessageOrBuilder(); } else { return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } } /** * optional .hadoop.yarn.PriorityProto priority = 1; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> getPriorityFieldBuilder() { if (priorityBuilder_ == null) { priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>( getPriority(), getParentForChildren(), isClean()); priority_ = null; } return priorityBuilder_; } private java.lang.Object resourceName_ = ""; /** * optional string resource_name = 2; * @return Whether the resourceName field is set. */ public boolean hasResourceName() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string resource_name = 2; * @return The resourceName. 
*/ public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { resourceName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string resource_name = 2; * @return The bytes for resourceName. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string resource_name = 2; * @param value The resourceName to set. * @return This builder for chaining. */ public Builder setResourceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional string resource_name = 2; * @return This builder for chaining. */ public Builder clearResourceName() { resourceName_ = getDefaultInstance().getResourceName(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * optional string resource_name = 2; * @param value The bytes for resourceName to set. * @return This builder for chaining. */ public Builder setResourceNameBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> capabilityBuilder_; /** * optional .hadoop.yarn.ResourceProto capability = 3; * @return Whether the capability field is set. */ public boolean hasCapability() { return ((bitField0_ & 0x00000004) != 0); } /** * optional .hadoop.yarn.ResourceProto capability = 3; * @return The capability. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() { if (capabilityBuilder_ == null) { return capability_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } else { return capabilityBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceProto capability = 3; */ public Builder setCapability(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (capabilityBuilder_ == null) { if (value == null) { throw new NullPointerException(); } capability_ = value; } else { capabilityBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto capability = 3; */ public Builder setCapability( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) { if (capabilityBuilder_ == null) { capability_ = builderForValue.build(); } else { capabilityBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto capability = 3; */ public Builder mergeCapability(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (capabilityBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && capability_ != null && capability_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) { getCapabilityBuilder().mergeFrom(value); } else { capability_ = value; } } else { capabilityBuilder_.mergeFrom(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto capability = 3; */ public Builder clearCapability() { bitField0_ = (bitField0_ & ~0x00000004); capability_ = null; if (capabilityBuilder_ != null) { capabilityBuilder_.dispose(); capabilityBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto capability = 3; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getCapabilityBuilder() { bitField0_ |= 0x00000004; onChanged(); return getCapabilityFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceProto capability = 3; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() { if (capabilityBuilder_ != null) { return capabilityBuilder_.getMessageOrBuilder(); } else { return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } } /** * optional .hadoop.yarn.ResourceProto capability = 3; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getCapabilityFieldBuilder() { if (capabilityBuilder_ == null) { capabilityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>( getCapability(), getParentForChildren(), isClean()); capability_ = null; } return capabilityBuilder_; } private int numContainers_ ; /** * optional int32 num_containers = 4; * @return Whether the numContainers field is set. */ @java.lang.Override public boolean hasNumContainers() { return ((bitField0_ & 0x00000008) != 0); } /** * optional int32 num_containers = 4; * @return The numContainers. 
*/ @java.lang.Override public int getNumContainers() { return numContainers_; } /** * optional int32 num_containers = 4; * @param value The numContainers to set. * @return This builder for chaining. */ public Builder setNumContainers(int value) { numContainers_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional int32 num_containers = 4; * @return This builder for chaining. */ public Builder clearNumContainers() { bitField0_ = (bitField0_ & ~0x00000008); numContainers_ = 0; onChanged(); return this; } private boolean relaxLocality_ = true; /** * optional bool relax_locality = 5 [default = true]; * @return Whether the relaxLocality field is set. */ @java.lang.Override public boolean hasRelaxLocality() { return ((bitField0_ & 0x00000010) != 0); } /** * optional bool relax_locality = 5 [default = true]; * @return The relaxLocality. */ @java.lang.Override public boolean getRelaxLocality() { return relaxLocality_; } /** * optional bool relax_locality = 5 [default = true]; * @param value The relaxLocality to set. * @return This builder for chaining. */ public Builder setRelaxLocality(boolean value) { relaxLocality_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional bool relax_locality = 5 [default = true]; * @return This builder for chaining. */ public Builder clearRelaxLocality() { bitField0_ = (bitField0_ & ~0x00000010); relaxLocality_ = true; onChanged(); return this; } private java.lang.Object nodeLabelExpression_ = ""; /** * optional string node_label_expression = 6; * @return Whether the nodeLabelExpression field is set. */ public boolean hasNodeLabelExpression() { return ((bitField0_ & 0x00000020) != 0); } /** * optional string node_label_expression = 6; * @return The nodeLabelExpression. */ public java.lang.String getNodeLabelExpression() { java.lang.Object ref = nodeLabelExpression_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { nodeLabelExpression_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string node_label_expression = 6; * @return The bytes for nodeLabelExpression. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNodeLabelExpressionBytes() { java.lang.Object ref = nodeLabelExpression_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); nodeLabelExpression_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string node_label_expression = 6; * @param value The nodeLabelExpression to set. * @return This builder for chaining. */ public Builder setNodeLabelExpression( java.lang.String value) { if (value == null) { throw new NullPointerException(); } nodeLabelExpression_ = value; bitField0_ |= 0x00000020; onChanged(); return this; } /** * optional string node_label_expression = 6; * @return This builder for chaining. */ public Builder clearNodeLabelExpression() { nodeLabelExpression_ = getDefaultInstance().getNodeLabelExpression(); bitField0_ = (bitField0_ & ~0x00000020); onChanged(); return this; } /** * optional string node_label_expression = 6; * @param value The bytes for nodeLabelExpression to set. * @return This builder for chaining. 
*/ public Builder setNodeLabelExpressionBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } nodeLabelExpression_ = value; bitField0_ |= 0x00000020; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto executionTypeRequest_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder> executionTypeRequestBuilder_; /** * optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7; * @return Whether the executionTypeRequest field is set. */ public boolean hasExecutionTypeRequest() { return ((bitField0_ & 0x00000040) != 0); } /** * optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7; * @return The executionTypeRequest. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getExecutionTypeRequest() { if (executionTypeRequestBuilder_ == null) { return executionTypeRequest_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance() : executionTypeRequest_; } else { return executionTypeRequestBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7; */ public Builder setExecutionTypeRequest(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto value) { if (executionTypeRequestBuilder_ == null) { if (value == null) { throw new NullPointerException(); } executionTypeRequest_ = value; } else { executionTypeRequestBuilder_.setMessage(value); } bitField0_ |= 0x00000040; onChanged(); return this; } /** * optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7; */ public Builder setExecutionTypeRequest( org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder builderForValue) { if (executionTypeRequestBuilder_ == null) { executionTypeRequest_ = builderForValue.build(); } else { executionTypeRequestBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000040; onChanged(); return this; } /** * optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7; */ public Builder mergeExecutionTypeRequest(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto value) { if (executionTypeRequestBuilder_ == null) { if (((bitField0_ & 0x00000040) != 0) && executionTypeRequest_ != null && executionTypeRequest_ != org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance()) { getExecutionTypeRequestBuilder().mergeFrom(value); } else { executionTypeRequest_ = value; } } else { executionTypeRequestBuilder_.mergeFrom(value); } bitField0_ |= 0x00000040; onChanged(); return this; } /** * optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7; */ public Builder clearExecutionTypeRequest() { bitField0_ = (bitField0_ & ~0x00000040); executionTypeRequest_ = null; if (executionTypeRequestBuilder_ != null) { executionTypeRequestBuilder_.dispose(); executionTypeRequestBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7; */ public 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder getExecutionTypeRequestBuilder() { bitField0_ |= 0x00000040; onChanged(); return getExecutionTypeRequestFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder getExecutionTypeRequestOrBuilder() { if (executionTypeRequestBuilder_ != null) { return executionTypeRequestBuilder_.getMessageOrBuilder(); } else { return executionTypeRequest_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance() : executionTypeRequest_; } } /** * optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder> getExecutionTypeRequestFieldBuilder() { if (executionTypeRequestBuilder_ == null) { executionTypeRequestBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder>( getExecutionTypeRequest(), getParentForChildren(), isClean()); executionTypeRequest_ = null; } return executionTypeRequestBuilder_; } private long allocationRequestId_ = -1L; /** * optional int64 allocation_request_id = 8 [default = -1]; * @return Whether the allocationRequestId field is set. */ @java.lang.Override public boolean hasAllocationRequestId() { return ((bitField0_ & 0x00000080) != 0); } /** * optional int64 allocation_request_id = 8 [default = -1]; * @return The allocationRequestId. */ @java.lang.Override public long getAllocationRequestId() { return allocationRequestId_; } /** * optional int64 allocation_request_id = 8 [default = -1]; * @param value The allocationRequestId to set. * @return This builder for chaining. */ public Builder setAllocationRequestId(long value) { allocationRequestId_ = value; bitField0_ |= 0x00000080; onChanged(); return this; } /** * optional int64 allocation_request_id = 8 [default = -1]; * @return This builder for chaining. 
*/ public Builder clearAllocationRequestId() { bitField0_ = (bitField0_ & ~0x00000080); allocationRequestId_ = -1L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceRequestProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ResourceRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ExecutionTypeRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ExecutionTypeRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 1 [default = GUARANTEED]; * @return Whether the executionType field is set. */ boolean hasExecutionType(); /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 1 [default = GUARANTEED]; * @return The executionType. */ org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType(); /** * optional bool enforce_execution_type = 2 [default = false]; * @return Whether the enforceExecutionType field is set. */ boolean hasEnforceExecutionType(); /** * optional bool enforce_execution_type = 2 [default = false]; * @return The enforceExecutionType. 
*/ boolean getEnforceExecutionType(); } /** * Protobuf type {@code hadoop.yarn.ExecutionTypeRequestProto} */ public static final class ExecutionTypeRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ExecutionTypeRequestProto) ExecutionTypeRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ExecutionTypeRequestProto.newBuilder() to construct. private ExecutionTypeRequestProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ExecutionTypeRequestProto() { executionType_ = 1; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ExecutionTypeRequestProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ExecutionTypeRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ExecutionTypeRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder.class); } private int bitField0_; public static final int EXECUTION_TYPE_FIELD_NUMBER = 1; private int executionType_ = 1; /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 1 [default = GUARANTEED]; * @return Whether the executionType field is set. */ @java.lang.Override public boolean hasExecutionType() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 1 [default = GUARANTEED]; * @return The executionType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() { org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result; } public static final int ENFORCE_EXECUTION_TYPE_FIELD_NUMBER = 2; private boolean enforceExecutionType_ = false; /** * optional bool enforce_execution_type = 2 [default = false]; * @return Whether the enforceExecutionType field is set. */ @java.lang.Override public boolean hasEnforceExecutionType() { return ((bitField0_ & 0x00000002) != 0); } /** * optional bool enforce_execution_type = 2 [default = false]; * @return The enforceExecutionType. 
*/ @java.lang.Override public boolean getEnforceExecutionType() { return enforceExecutionType_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeEnum(1, executionType_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeBool(2, enforceExecutionType_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(1, executionType_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(2, enforceExecutionType_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto) obj; if (hasExecutionType() != other.hasExecutionType()) return false; if (hasExecutionType()) { if (executionType_ != other.executionType_) return false; } if (hasEnforceExecutionType() != other.hasEnforceExecutionType()) return false; if (hasEnforceExecutionType()) { if (getEnforceExecutionType() != other.getEnforceExecutionType()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasExecutionType()) { hash = (37 * hash) + EXECUTION_TYPE_FIELD_NUMBER; hash = (53 * hash) + executionType_; } if (hasEnforceExecutionType()) { hash = (37 * hash) + ENFORCE_EXECUTION_TYPE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getEnforceExecutionType()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ExecutionTypeRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ExecutionTypeRequestProto) org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ExecutionTypeRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ExecutionTypeRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; executionType_ = 1; enforceExecutionType_ = false; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ExecutionTypeRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.executionType_ = executionType_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.enforceExecutionType_ = enforceExecutionType_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance()) return this; if (other.hasExecutionType()) { setExecutionType(other.getExecutionType()); } if (other.hasEnforceExecutionType()) { setEnforceExecutionType(other.getEnforceExecutionType()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(1, tmpRaw); } else { executionType_ = tmpRaw; bitField0_ |= 0x00000001; } break; } // case 8 case 16: { enforceExecutionType_ = input.readBool(); bitField0_ |= 0x00000002; break; } // case 16 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int executionType_ = 1; /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 1 [default = GUARANTEED]; * @return Whether the executionType field is set. */ @java.lang.Override public boolean hasExecutionType() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 1 [default = GUARANTEED]; * @return The executionType. 
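     * (A stored number with no matching {@code ExecutionTypeProto} constant
     * makes this accessor fall back to {@code GUARANTEED}; during
     * {@code mergeFrom}, unrecognized enum numbers are preserved as unknown
     * varint fields rather than dropped, as the case 8 branch above shows.)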
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() { org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result; } /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 1 [default = GUARANTEED]; * @param value The executionType to set. * @return This builder for chaining. */ public Builder setExecutionType(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; executionType_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.ExecutionTypeProto execution_type = 1 [default = GUARANTEED]; * @return This builder for chaining. */ public Builder clearExecutionType() { bitField0_ = (bitField0_ & ~0x00000001); executionType_ = 1; onChanged(); return this; } private boolean enforceExecutionType_ ; /** * optional bool enforce_execution_type = 2 [default = false]; * @return Whether the enforceExecutionType field is set. */ @java.lang.Override public boolean hasEnforceExecutionType() { return ((bitField0_ & 0x00000002) != 0); } /** * optional bool enforce_execution_type = 2 [default = false]; * @return The enforceExecutionType. */ @java.lang.Override public boolean getEnforceExecutionType() { return enforceExecutionType_; } /** * optional bool enforce_execution_type = 2 [default = false]; * @param value The enforceExecutionType to set. * @return This builder for chaining. */ public Builder setEnforceExecutionType(boolean value) { enforceExecutionType_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional bool enforce_execution_type = 2 [default = false]; * @return This builder for chaining. 
*/ public Builder clearEnforceExecutionType() { bitField0_ = (bitField0_ & ~0x00000002); enforceExecutionType_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ExecutionTypeRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ExecutionTypeRequestProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ExecutionTypeRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface SchedulingRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.SchedulingRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional int64 allocationRequestId = 1 [default = 0]; * @return Whether the allocationRequestId field is set. */ boolean hasAllocationRequestId(); /** * optional int64 allocationRequestId = 1 [default = 0]; * @return The allocationRequestId. */ long getAllocationRequestId(); /** * optional .hadoop.yarn.PriorityProto priority = 2; * @return Whether the priority field is set. */ boolean hasPriority(); /** * optional .hadoop.yarn.PriorityProto priority = 2; * @return The priority. 
*/ org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority(); /** * optional .hadoop.yarn.PriorityProto priority = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder(); /** * optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3; * @return Whether the executionType field is set. */ boolean hasExecutionType(); /** * optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3; * @return The executionType. */ org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getExecutionType(); /** * optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3; */ org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder getExecutionTypeOrBuilder(); /** * repeated string allocationTags = 4; * @return A list containing the allocationTags. */ java.util.List getAllocationTagsList(); /** * repeated string allocationTags = 4; * @return The count of allocationTags. */ int getAllocationTagsCount(); /** * repeated string allocationTags = 4; * @param index The index of the element to return. * @return The allocationTags at the given index. */ java.lang.String getAllocationTags(int index); /** * repeated string allocationTags = 4; * @param index The index of the value to return. * @return The bytes of the allocationTags at the given index. */ org.apache.hadoop.thirdparty.protobuf.ByteString getAllocationTagsBytes(int index); /** * optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5; * @return Whether the resourceSizing field is set. */ boolean hasResourceSizing(); /** * optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5; * @return The resourceSizing. */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto getResourceSizing(); /** * optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProtoOrBuilder getResourceSizingOrBuilder(); /** * optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6; * @return Whether the placementConstraint field is set. */ boolean hasPlacementConstraint(); /** * optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6; * @return The placementConstraint. */ org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint(); /** * optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6; */ org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.SchedulingRequestProto} */ public static final class SchedulingRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.SchedulingRequestProto) SchedulingRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use SchedulingRequestProto.newBuilder() to construct. 
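/*
 * Usage sketch (illustrative, not emitted by protoc): a client would normally
 * populate this message through the generated builder, e.g.
 *
 *   YarnProtos.SchedulingRequestProto req =
 *       YarnProtos.SchedulingRequestProto.newBuilder()
 *           .setAllocationRequestId(42L)                 // field 1
 *           .addAllocationTags("web-server")             // repeated field 4
 *           .setExecutionType(
 *               YarnProtos.ExecutionTypeRequestProto.newBuilder()
 *                   .setExecutionType(YarnProtos.ExecutionTypeProto.GUARANTEED)
 *                   .setEnforceExecutionType(true))
 *           .build();
 *
 * The Builder overload of setExecutionType(...) calls build() on the nested
 * builder immediately, so later edits to that nested builder have no effect
 * on the enclosing message.
 */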
private SchedulingRequestProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private SchedulingRequestProto() { allocationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new SchedulingRequestProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SchedulingRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SchedulingRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder.class); } private int bitField0_; public static final int ALLOCATIONREQUESTID_FIELD_NUMBER = 1; private long allocationRequestId_ = 0L; /** * optional int64 allocationRequestId = 1 [default = 0]; * @return Whether the allocationRequestId field is set. */ @java.lang.Override public boolean hasAllocationRequestId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional int64 allocationRequestId = 1 [default = 0]; * @return The allocationRequestId. */ @java.lang.Override public long getAllocationRequestId() { return allocationRequestId_; } public static final int PRIORITY_FIELD_NUMBER = 2; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_; /** * optional .hadoop.yarn.PriorityProto priority = 2; * @return Whether the priority field is set. */ @java.lang.Override public boolean hasPriority() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.PriorityProto priority = 2; * @return The priority. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() { return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } /** * optional .hadoop.yarn.PriorityProto priority = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() { return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } public static final int EXECUTIONTYPE_FIELD_NUMBER = 3; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto executionType_; /** * optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3; * @return Whether the executionType field is set. */ @java.lang.Override public boolean hasExecutionType() { return ((bitField0_ & 0x00000004) != 0); } /** * optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3; * @return The executionType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getExecutionType() { return executionType_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance() : executionType_; } /** * optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder getExecutionTypeOrBuilder() { return executionType_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance() : executionType_; } public static final int ALLOCATIONTAGS_FIELD_NUMBER = 4; @SuppressWarnings("serial") private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList allocationTags_; /** * repeated string allocationTags = 4; * @return A list containing the allocationTags. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getAllocationTagsList() { return allocationTags_; } /** * repeated string allocationTags = 4; * @return The count of allocationTags. */ public int getAllocationTagsCount() { return allocationTags_.size(); } /** * repeated string allocationTags = 4; * @param index The index of the element to return. * @return The allocationTags at the given index. */ public java.lang.String getAllocationTags(int index) { return allocationTags_.get(index); } /** * repeated string allocationTags = 4; * @param index The index of the value to return. * @return The bytes of the allocationTags at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getAllocationTagsBytes(int index) { return allocationTags_.getByteString(index); } public static final int RESOURCESIZING_FIELD_NUMBER = 5; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto resourceSizing_; /** * optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5; * @return Whether the resourceSizing field is set. */ @java.lang.Override public boolean hasResourceSizing() { return ((bitField0_ & 0x00000008) != 0); } /** * optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5; * @return The resourceSizing. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto getResourceSizing() { return resourceSizing_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.getDefaultInstance() : resourceSizing_; } /** * optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProtoOrBuilder getResourceSizingOrBuilder() { return resourceSizing_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.getDefaultInstance() : resourceSizing_; } public static final int PLACEMENTCONSTRAINT_FIELD_NUMBER = 6; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto placementConstraint_; /** * optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6; * @return Whether the placementConstraint field is set. */ @java.lang.Override public boolean hasPlacementConstraint() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6; * @return The placementConstraint. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint() { return placementConstraint_ == null ? 
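/*
 * Accessor pattern note: for optional message fields, getX() above never
 * returns null; when the field is unset it returns the type's default
 * instance. Presence therefore has to be tested explicitly, e.g.
 *
 *   if (req.hasResourceSizing()) {
 *       int n = req.getResourceSizing().getNumAllocations();
 *       // ...
 *   }
 *
 * (Illustrative; getNumAllocations() is declared on ResourceSizingProto
 * further down in this file.)
 */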
org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_; } /** * optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder() { return placementConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasResourceSizing()) { if (!getResourceSizing().isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasPlacementConstraint()) { if (!getPlacementConstraint().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeInt64(1, allocationRequestId_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getPriority()); } if (((bitField0_ & 0x00000004) != 0)) { output.writeMessage(3, getExecutionType()); } for (int i = 0; i < allocationTags_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, allocationTags_.getRaw(i)); } if (((bitField0_ & 0x00000008) != 0)) { output.writeMessage(5, getResourceSizing()); } if (((bitField0_ & 0x00000010) != 0)) { output.writeMessage(6, getPlacementConstraint()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(1, allocationRequestId_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getPriority()); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(3, getExecutionType()); } { int dataSize = 0; for (int i = 0; i < allocationTags_.size(); i++) { dataSize += computeStringSizeNoTag(allocationTags_.getRaw(i)); } size += dataSize; size += 1 * getAllocationTagsList().size(); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(5, getResourceSizing()); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(6, getPlacementConstraint()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto) obj; if (hasAllocationRequestId() != other.hasAllocationRequestId()) return false; if (hasAllocationRequestId()) { if (getAllocationRequestId() != other.getAllocationRequestId()) return false; } if (hasPriority() != 
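/*
 * Wire-format detail behind writeTo()/getSerializedSize() above: every field
 * is preceded by a varint tag, where tag = (fieldNumber << 3) | wireType.
 * For this message that gives: allocationRequestId (field 1, varint) -> 8,
 * priority (field 2, length-delimited) -> 18, executionType (3) -> 26, each
 * allocationTags entry (4) -> 34, resourceSizing (5) -> 42, and
 * placementConstraint (6) -> 50 -- exactly the case labels in the Builder's
 * mergeFrom(CodedInputStream) switch further down. The
 * "size += 1 * getAllocationTagsList().size()" term above is that one-byte
 * tag counted once per tag string.
 */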
other.hasPriority()) return false; if (hasPriority()) { if (!getPriority() .equals(other.getPriority())) return false; } if (hasExecutionType() != other.hasExecutionType()) return false; if (hasExecutionType()) { if (!getExecutionType() .equals(other.getExecutionType())) return false; } if (!getAllocationTagsList() .equals(other.getAllocationTagsList())) return false; if (hasResourceSizing() != other.hasResourceSizing()) return false; if (hasResourceSizing()) { if (!getResourceSizing() .equals(other.getResourceSizing())) return false; } if (hasPlacementConstraint() != other.hasPlacementConstraint()) return false; if (hasPlacementConstraint()) { if (!getPlacementConstraint() .equals(other.getPlacementConstraint())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasAllocationRequestId()) { hash = (37 * hash) + ALLOCATIONREQUESTID_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getAllocationRequestId()); } if (hasPriority()) { hash = (37 * hash) + PRIORITY_FIELD_NUMBER; hash = (53 * hash) + getPriority().hashCode(); } if (hasExecutionType()) { hash = (37 * hash) + EXECUTIONTYPE_FIELD_NUMBER; hash = (53 * hash) + getExecutionType().hashCode(); } if (getAllocationTagsCount() > 0) { hash = (37 * hash) + ALLOCATIONTAGS_FIELD_NUMBER; hash = (53 * hash) + getAllocationTagsList().hashCode(); } if (hasResourceSizing()) { hash = (37 * hash) + RESOURCESIZING_FIELD_NUMBER; hash = (53 * hash) + getResourceSizing().hashCode(); } if (hasPlacementConstraint()) { hash = (37 * hash) + PLACEMENTCONSTRAINT_FIELD_NUMBER; hash = (53 * hash) + getPlacementConstraint().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom( byte[] 
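/*
 * Note on the parseFrom(...) family above and below: the in-memory overloads
 * (ByteBuffer, ByteString, byte[]) declare only
 * InvalidProtocolBufferException, while the InputStream and CodedInputStream
 * overloads declare java.io.IOException, since they can fail in the
 * underlying stream as well as during decoding. Illustrative call, assuming
 * "byteString" holds a serialized message:
 *
 *   YarnProtos.SchedulingRequestProto msg =
 *       YarnProtos.SchedulingRequestProto.parseFrom(byteString);
 */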
data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
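/*
 * Round-trip sketch for the parse methods above (illustrative):
 *
 *   byte[] bytes = req.toByteArray();
 *   YarnProtos.SchedulingRequestProto copy =
 *       YarnProtos.SchedulingRequestProto.parseFrom(bytes);
 *
 * For several messages on one stream, writeDelimitedTo(out) pairs with
 * parseDelimitedFrom(in), which reads a varint length prefix first; the
 * plain parseFrom(InputStream) variant instead consumes the stream to EOF.
 */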
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.SchedulingRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.SchedulingRequestProto) org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SchedulingRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SchedulingRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getPriorityFieldBuilder(); getExecutionTypeFieldBuilder(); getResourceSizingFieldBuilder(); getPlacementConstraintFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; allocationRequestId_ = 0L; priority_ = null; if (priorityBuilder_ != null) { priorityBuilder_.dispose(); priorityBuilder_ = null; } executionType_ = null; if (executionTypeBuilder_ != null) { executionTypeBuilder_.dispose(); executionTypeBuilder_ = null; } allocationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000008); resourceSizing_ = null; if (resourceSizingBuilder_ != null) { resourceSizingBuilder_.dispose(); resourceSizingBuilder_ = null; } placementConstraint_ = null; if (placementConstraintBuilder_ != null) { placementConstraintBuilder_.dispose(); placementConstraintBuilder_ = null; } return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SchedulingRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto build() { org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto 
buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto result = new org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto result) { if (((bitField0_ & 0x00000008) != 0)) { allocationTags_ = allocationTags_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000008); } result.allocationTags_ = allocationTags_; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.allocationRequestId_ = allocationRequestId_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.priority_ = priorityBuilder_ == null ? priority_ : priorityBuilder_.build(); to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.executionType_ = executionTypeBuilder_ == null ? executionType_ : executionTypeBuilder_.build(); to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000010) != 0)) { result.resourceSizing_ = resourceSizingBuilder_ == null ? resourceSizing_ : resourceSizingBuilder_.build(); to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000020) != 0)) { result.placementConstraint_ = placementConstraintBuilder_ == null ? placementConstraint_ : placementConstraintBuilder_.build(); to_bitField0_ |= 0x00000010; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance()) return this; if (other.hasAllocationRequestId()) { setAllocationRequestId(other.getAllocationRequestId()); } if (other.hasPriority()) { mergePriority(other.getPriority()); } if (other.hasExecutionType()) { mergeExecutionType(other.getExecutionType()); } if (!other.allocationTags_.isEmpty()) { if 
(allocationTags_.isEmpty()) { allocationTags_ = other.allocationTags_; bitField0_ = (bitField0_ & ~0x00000008); } else { ensureAllocationTagsIsMutable(); allocationTags_.addAll(other.allocationTags_); } onChanged(); } if (other.hasResourceSizing()) { mergeResourceSizing(other.getResourceSizing()); } if (other.hasPlacementConstraint()) { mergePlacementConstraint(other.getPlacementConstraint()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasResourceSizing()) { if (!getResourceSizing().isInitialized()) { return false; } } if (hasPlacementConstraint()) { if (!getPlacementConstraint().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { allocationRequestId_ = input.readInt64(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { input.readMessage( getPriorityFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage( getExecutionTypeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 case 34: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); ensureAllocationTagsIsMutable(); allocationTags_.add(bs); break; } // case 34 case 42: { input.readMessage( getResourceSizingFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000010; break; } // case 42 case 50: { input.readMessage( getPlacementConstraintFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000020; break; } // case 50 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private long allocationRequestId_ ; /** * optional int64 allocationRequestId = 1 [default = 0]; * @return Whether the allocationRequestId field is set. */ @java.lang.Override public boolean hasAllocationRequestId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional int64 allocationRequestId = 1 [default = 0]; * @return The allocationRequestId. */ @java.lang.Override public long getAllocationRequestId() { return allocationRequestId_; } /** * optional int64 allocationRequestId = 1 [default = 0]; * @param value The allocationRequestId to set. * @return This builder for chaining. */ public Builder setAllocationRequestId(long value) { allocationRequestId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional int64 allocationRequestId = 1 [default = 0]; * @return This builder for chaining. 
*/ public Builder clearAllocationRequestId() { bitField0_ = (bitField0_ & ~0x00000001); allocationRequestId_ = 0L; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_; /** * optional .hadoop.yarn.PriorityProto priority = 2; * @return Whether the priority field is set. */ public boolean hasPriority() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.PriorityProto priority = 2; * @return The priority. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() { if (priorityBuilder_ == null) { return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } else { return priorityBuilder_.getMessage(); } } /** * optional .hadoop.yarn.PriorityProto priority = 2; */ public Builder setPriority(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) { if (priorityBuilder_ == null) { if (value == null) { throw new NullPointerException(); } priority_ = value; } else { priorityBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 2; */ public Builder setPriority( org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) { if (priorityBuilder_ == null) { priority_ = builderForValue.build(); } else { priorityBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 2; */ public Builder mergePriority(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) { if (priorityBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && priority_ != null && priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) { getPriorityBuilder().mergeFrom(value); } else { priority_ = value; } } else { priorityBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 2; */ public Builder clearPriority() { bitField0_ = (bitField0_ & ~0x00000002); priority_ = null; if (priorityBuilder_ != null) { priorityBuilder_.dispose(); priorityBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() { bitField0_ |= 0x00000002; onChanged(); return getPriorityFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.PriorityProto priority = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() { if (priorityBuilder_ != null) { return priorityBuilder_.getMessageOrBuilder(); } else { return priority_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } } /** * optional .hadoop.yarn.PriorityProto priority = 2; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> getPriorityFieldBuilder() { if (priorityBuilder_ == null) { priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>( getPriority(), getParentForChildren(), isClean()); priority_ = null; } return priorityBuilder_; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto executionType_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder> executionTypeBuilder_; /** * optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3; * @return Whether the executionType field is set. */ public boolean hasExecutionType() { return ((bitField0_ & 0x00000004) != 0); } /** * optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3; * @return The executionType. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getExecutionType() { if (executionTypeBuilder_ == null) { return executionType_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance() : executionType_; } else { return executionTypeBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3; */ public Builder setExecutionType(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto value) { if (executionTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } executionType_ = value; } else { executionTypeBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3; */ public Builder setExecutionType( org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder builderForValue) { if (executionTypeBuilder_ == null) { executionType_ = builderForValue.build(); } else { executionTypeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3; */ public Builder mergeExecutionType(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto value) { if (executionTypeBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && executionType_ != null && executionType_ != org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance()) { getExecutionTypeBuilder().mergeFrom(value); } else { executionType_ = value; } } else { executionTypeBuilder_.mergeFrom(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3; */ public Builder clearExecutionType() { bitField0_ = (bitField0_ & ~0x00000004); executionType_ = null; if (executionTypeBuilder_ != null) { executionTypeBuilder_.dispose(); executionTypeBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder getExecutionTypeBuilder() { bitField0_ |= 0x00000004; onChanged(); return getExecutionTypeFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder getExecutionTypeOrBuilder() { if (executionTypeBuilder_ != null) { return executionTypeBuilder_.getMessageOrBuilder(); } else { return executionType_ == null ? 
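/*
 * getExecutionTypeBuilder() above allows in-place editing of the nested
 * message without rebuilding it from scratch (illustrative):
 *
 *   YarnProtos.SchedulingRequestProto.Builder b = req.toBuilder();
 *   b.getExecutionTypeBuilder().setEnforceExecutionType(true);
 *   YarnProtos.SchedulingRequestProto updated = b.build();
 *
 * Note that merely obtaining the nested builder marks the field as present
 * (bitField0_ |= 0x00000004), even if nothing is set on it afterwards.
 */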
org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance() : executionType_; } } /** * optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder> getExecutionTypeFieldBuilder() { if (executionTypeBuilder_ == null) { executionTypeBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder>( getExecutionType(), getParentForChildren(), isClean()); executionType_ = null; } return executionTypeBuilder_; } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList allocationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; private void ensureAllocationTagsIsMutable() { if (!((bitField0_ & 0x00000008) != 0)) { allocationTags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(allocationTags_); bitField0_ |= 0x00000008; } } /** * repeated string allocationTags = 4; * @return A list containing the allocationTags. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getAllocationTagsList() { return allocationTags_.getUnmodifiableView(); } /** * repeated string allocationTags = 4; * @return The count of allocationTags. */ public int getAllocationTagsCount() { return allocationTags_.size(); } /** * repeated string allocationTags = 4; * @param index The index of the element to return. * @return The allocationTags at the given index. */ public java.lang.String getAllocationTags(int index) { return allocationTags_.get(index); } /** * repeated string allocationTags = 4; * @param index The index of the value to return. * @return The bytes of the allocationTags at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getAllocationTagsBytes(int index) { return allocationTags_.getByteString(index); } /** * repeated string allocationTags = 4; * @param index The index to set the value at. * @param value The allocationTags to set. * @return This builder for chaining. */ public Builder setAllocationTags( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureAllocationTagsIsMutable(); allocationTags_.set(index, value); onChanged(); return this; } /** * repeated string allocationTags = 4; * @param value The allocationTags to add. * @return This builder for chaining. */ public Builder addAllocationTags( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureAllocationTagsIsMutable(); allocationTags_.add(value); onChanged(); return this; } /** * repeated string allocationTags = 4; * @param values The allocationTags to add. * @return This builder for chaining. */ public Builder addAllAllocationTags( java.lang.Iterable values) { ensureAllocationTagsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, allocationTags_); onChanged(); return this; } /** * repeated string allocationTags = 4; * @return This builder for chaining. 
*/ public Builder clearAllocationTags() { allocationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * repeated string allocationTags = 4; * @param value The bytes of the allocationTags to add. * @return This builder for chaining. */ public Builder addAllocationTagsBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureAllocationTagsIsMutable(); allocationTags_.add(value); onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto resourceSizing_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProtoOrBuilder> resourceSizingBuilder_; /** * optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5; * @return Whether the resourceSizing field is set. */ public boolean hasResourceSizing() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5; * @return The resourceSizing. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto getResourceSizing() { if (resourceSizingBuilder_ == null) { return resourceSizing_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.getDefaultInstance() : resourceSizing_; } else { return resourceSizingBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5; */ public Builder setResourceSizing(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto value) { if (resourceSizingBuilder_ == null) { if (value == null) { throw new NullPointerException(); } resourceSizing_ = value; } else { resourceSizingBuilder_.setMessage(value); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5; */ public Builder setResourceSizing( org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.Builder builderForValue) { if (resourceSizingBuilder_ == null) { resourceSizing_ = builderForValue.build(); } else { resourceSizingBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5; */ public Builder mergeResourceSizing(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto value) { if (resourceSizingBuilder_ == null) { if (((bitField0_ & 0x00000010) != 0) && resourceSizing_ != null && resourceSizing_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.getDefaultInstance()) { getResourceSizingBuilder().mergeFrom(value); } else { resourceSizing_ = value; } } else { resourceSizingBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5; */ public Builder clearResourceSizing() { bitField0_ = (bitField0_ & ~0x00000010); resourceSizing_ = null; if (resourceSizingBuilder_ != null) { resourceSizingBuilder_.dispose(); resourceSizingBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.Builder 
getResourceSizingBuilder() { bitField0_ |= 0x00000010; onChanged(); return getResourceSizingFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProtoOrBuilder getResourceSizingOrBuilder() { if (resourceSizingBuilder_ != null) { return resourceSizingBuilder_.getMessageOrBuilder(); } else { return resourceSizing_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.getDefaultInstance() : resourceSizing_; } } /** * optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProtoOrBuilder> getResourceSizingFieldBuilder() { if (resourceSizingBuilder_ == null) { resourceSizingBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProtoOrBuilder>( getResourceSizing(), getParentForChildren(), isClean()); resourceSizing_ = null; } return resourceSizingBuilder_; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto placementConstraint_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> placementConstraintBuilder_; /** * optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6; * @return Whether the placementConstraint field is set. */ public boolean hasPlacementConstraint() { return ((bitField0_ & 0x00000020) != 0); } /** * optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6; * @return The placementConstraint. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint() { if (placementConstraintBuilder_ == null) { return placementConstraint_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_; } else { return placementConstraintBuilder_.getMessage(); } } /** * optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6; */ public Builder setPlacementConstraint(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) { if (placementConstraintBuilder_ == null) { if (value == null) { throw new NullPointerException(); } placementConstraint_ = value; } else { placementConstraintBuilder_.setMessage(value); } bitField0_ |= 0x00000020; onChanged(); return this; } /** * optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6; */ public Builder setPlacementConstraint( org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder builderForValue) { if (placementConstraintBuilder_ == null) { placementConstraint_ = builderForValue.build(); } else { placementConstraintBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000020; onChanged(); return this; } /** * optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6; */ public Builder mergePlacementConstraint(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) { if (placementConstraintBuilder_ == null) { if (((bitField0_ & 0x00000020) != 0) && placementConstraint_ != null && placementConstraint_ != org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance()) { getPlacementConstraintBuilder().mergeFrom(value); } else { placementConstraint_ = value; } } else { placementConstraintBuilder_.mergeFrom(value); } bitField0_ |= 0x00000020; onChanged(); return this; } /** * optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6; */ public Builder clearPlacementConstraint() { bitField0_ = (bitField0_ & ~0x00000020); placementConstraint_ = null; if (placementConstraintBuilder_ != null) { placementConstraintBuilder_.dispose(); placementConstraintBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder getPlacementConstraintBuilder() { bitField0_ |= 0x00000020; onChanged(); return getPlacementConstraintFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder() { if (placementConstraintBuilder_ != null) { return placementConstraintBuilder_.getMessageOrBuilder(); } else { return placementConstraint_ == null ? 
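/*
 * mergePlacementConstraint() above implements the protobuf merge rule for
 * singular message fields: if the field is already present (and is not the
 * default instance) the incoming value is merged into it field by field;
 * otherwise it simply replaces the stored value. Illustratively:
 *
 *   builder.mergePlacementConstraint(pc1); // field unset -> stores pc1
 *   builder.mergePlacementConstraint(pc2); // merges pc2 into pc1
 *
 * Builder.mergeFrom(Message) relies on the same rule for every
 * message-typed field of this message.
 */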
org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_; } } /** * optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> getPlacementConstraintFieldBuilder() { if (placementConstraintBuilder_ == null) { placementConstraintBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder>( getPlacementConstraint(), getParentForChildren(), isClean()); placementConstraint_ = null; } return placementConstraintBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.SchedulingRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.SchedulingRequestProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public SchedulingRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface 
ResourceSizingProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceSizingProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional int32 numAllocations = 1; * @return Whether the numAllocations field is set. */ boolean hasNumAllocations(); /** * optional int32 numAllocations = 1; * @return The numAllocations. */ int getNumAllocations(); /** * optional .hadoop.yarn.ResourceProto resources = 2; * @return Whether the resources field is set. */ boolean hasResources(); /** * optional .hadoop.yarn.ResourceProto resources = 2; * @return The resources. */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources(); /** * optional .hadoop.yarn.ResourceProto resources = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.ResourceSizingProto} */ public static final class ResourceSizingProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceSizingProto) ResourceSizingProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ResourceSizingProto.newBuilder() to construct. private ResourceSizingProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ResourceSizingProto() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ResourceSizingProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceSizingProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceSizingProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.Builder.class); } private int bitField0_; public static final int NUMALLOCATIONS_FIELD_NUMBER = 1; private int numAllocations_ = 0; /** * optional int32 numAllocations = 1; * @return Whether the numAllocations field is set. */ @java.lang.Override public boolean hasNumAllocations() { return ((bitField0_ & 0x00000001) != 0); } /** * optional int32 numAllocations = 1; * @return The numAllocations. */ @java.lang.Override public int getNumAllocations() { return numAllocations_; } public static final int RESOURCES_FIELD_NUMBER = 2; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resources_; /** * optional .hadoop.yarn.ResourceProto resources = 2; * @return Whether the resources field is set. */ @java.lang.Override public boolean hasResources() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.ResourceProto resources = 2; * @return The resources. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources() { return resources_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_; } /** * optional .hadoop.yarn.ResourceProto resources = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder() { return resources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasResources()) { if (!getResources().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeInt32(1, numAllocations_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getResources()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(1, numAllocations_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getResources()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto) obj; if (hasNumAllocations() != other.hasNumAllocations()) return false; if (hasNumAllocations()) { if (getNumAllocations() != other.getNumAllocations()) return false; } if (hasResources() != other.hasResources()) return false; if (hasResources()) { if (!getResources() .equals(other.getResources())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasNumAllocations()) { hash = (37 * hash) + NUMALLOCATIONS_FIELD_NUMBER; hash = (53 * hash) + getNumAllocations(); } if (hasResources()) { hash = (37 * hash) + RESOURCES_FIELD_NUMBER; hash = (53 * hash) + getResources().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
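/*
 * Wire-format arithmetic behind the writeTo/getSerializedSize pair above,
 * sketched: field 1 (int32 numAllocations, varint) is written under tag
 * byte (1 << 3) | 0 = 8, and field 2 (message resources, length-delimited)
 * under tag byte (2 << 3) | 2 = 18, the same 8 and 18 the Builder's
 * mergeFrom switches on below. For example, a message with only
 * setNumAllocations(2) serializes to the two bytes 0x08 0x02.
 */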
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ResourceSizingProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceSizingProto) org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceSizingProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceSizingProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getResourcesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; numAllocations_ = 0; resources_ = null; if (resourcesBuilder_ != null) { resourcesBuilder_.dispose(); resourcesBuilder_ = null; } return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceSizingProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.numAllocations_ = numAllocations_; to_bitField0_ |= 0x00000001; } if 
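/*
 * (Orientation note for buildPartial0: each set field flips one bit in the
 * Builder's bitField0_, and those bits are copied into the message here so
 * the hasXxx() accessors keep reporting presence after build(); e.g.
 * ResourceSizingProto.newBuilder().setNumAllocations(3).build()
 *     .hasNumAllocations() is true.)
 */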
(((from_bitField0_ & 0x00000002) != 0)) { result.resources_ = resourcesBuilder_ == null ? resources_ : resourcesBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.getDefaultInstance()) return this; if (other.hasNumAllocations()) { setNumAllocations(other.getNumAllocations()); } if (other.hasResources()) { mergeResources(other.getResources()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasResources()) { if (!getResources().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { numAllocations_ = input.readInt32(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { input.readMessage( getResourcesFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int numAllocations_ ; /** * optional int32 numAllocations = 1; * @return Whether the numAllocations field is set. */ @java.lang.Override public boolean hasNumAllocations() { return ((bitField0_ & 0x00000001) != 0); } /** * optional int32 numAllocations = 1; * @return The numAllocations. 
*/ @java.lang.Override public int getNumAllocations() { return numAllocations_; } /** * optional int32 numAllocations = 1; * @param value The numAllocations to set. * @return This builder for chaining. */ public Builder setNumAllocations(int value) { numAllocations_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional int32 numAllocations = 1; * @return This builder for chaining. */ public Builder clearNumAllocations() { bitField0_ = (bitField0_ & ~0x00000001); numAllocations_ = 0; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resources_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourcesBuilder_; /** * optional .hadoop.yarn.ResourceProto resources = 2; * @return Whether the resources field is set. */ public boolean hasResources() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.ResourceProto resources = 2; * @return The resources. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources() { if (resourcesBuilder_ == null) { return resources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_; } else { return resourcesBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceProto resources = 2; */ public Builder setResources(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (resourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } resources_ = value; } else { resourcesBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resources = 2; */ public Builder setResources( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) { if (resourcesBuilder_ == null) { resources_ = builderForValue.build(); } else { resourcesBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resources = 2; */ public Builder mergeResources(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (resourcesBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && resources_ != null && resources_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) { getResourcesBuilder().mergeFrom(value); } else { resources_ = value; } } else { resourcesBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resources = 2; */ public Builder clearResources() { bitField0_ = (bitField0_ & ~0x00000002); resources_ = null; if (resourcesBuilder_ != null) { resourcesBuilder_.dispose(); resourcesBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resources = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourcesBuilder() { bitField0_ |= 0x00000002; onChanged(); return getResourcesFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceProto resources = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder() { if (resourcesBuilder_ != null) { return 
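/*
 * Working with the nested resources message through its builder, sketched
 * (assumes the ResourceProto setters from yarn_protos.proto such as
 * setMemory/setVirtualCores; illustrative only):
 *
 *   YarnProtos.ResourceSizingProto.Builder b =
 *       YarnProtos.ResourceSizingProto.newBuilder();
 *   b.getResourcesBuilder().setMemory(1024L).setVirtualCores(1);
 *   YarnProtos.ResourceSizingProto sizing = b.build();
 *
 * getResourcesBuilder() marks the field present and lazily creates the
 * SingleFieldBuilderV3 set up in getResourcesFieldBuilder() below.
 */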
resourcesBuilder_.getMessageOrBuilder(); } else { return resources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_; } } /** * optional .hadoop.yarn.ResourceProto resources = 2; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getResourcesFieldBuilder() { if (resourcesBuilder_ == null) { resourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>( getResources(), getParentForChildren(), isClean()); resources_ = null; } return resourcesBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceSizingProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceSizingProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ResourceSizingProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface RejectedSchedulingRequestProtoOrBuilder extends // 
@@protoc_insertion_point(interface_extends:hadoop.yarn.RejectedSchedulingRequestProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /**
   * required .hadoop.yarn.RejectionReasonProto reason = 1;
   * @return Whether the reason field is set.
   */
  boolean hasReason();
  /**
   * required .hadoop.yarn.RejectionReasonProto reason = 1;
   * @return The reason.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto getReason();

  /**
   * required .hadoop.yarn.SchedulingRequestProto request = 2;
   * @return Whether the request field is set.
   */
  boolean hasRequest();
  /**
   * required .hadoop.yarn.SchedulingRequestProto request = 2;
   * @return The request.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getRequest();
  /**
   * required .hadoop.yarn.SchedulingRequestProto request = 2;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder getRequestOrBuilder();
}
/**
 * Protobuf type {@code hadoop.yarn.RejectedSchedulingRequestProto}
 */
public static final class RejectedSchedulingRequestProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.RejectedSchedulingRequestProto)
    RejectedSchedulingRequestProtoOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use RejectedSchedulingRequestProto.newBuilder() to construct.
  private RejectedSchedulingRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private RejectedSchedulingRequestProto() {
    reason_ = 1;
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new RejectedSchedulingRequestProto();
  }

  @java.lang.Override
  public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_RejectedSchedulingRequestProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_RejectedSchedulingRequestProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder.class);
  }

  private int bitField0_;
  public static final int REASON_FIELD_NUMBER = 1;
  private int reason_ = 1;
  /**
   * required .hadoop.yarn.RejectionReasonProto reason = 1;
   * @return Whether the reason field is set.
   */
  @java.lang.Override
  public boolean hasReason() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * required .hadoop.yarn.RejectionReasonProto reason = 1;
   * @return The reason.
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto getReason() {
    org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto result = org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto.forNumber(reason_);
    return result == null ?
org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto.RRP_COULD_NOT_PLACE_ON_NODE : result; } public static final int REQUEST_FIELD_NUMBER = 2; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto request_; /** * required .hadoop.yarn.SchedulingRequestProto request = 2; * @return Whether the request field is set. */ @java.lang.Override public boolean hasRequest() { return ((bitField0_ & 0x00000002) != 0); } /** * required .hadoop.yarn.SchedulingRequestProto request = 2; * @return The request. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getRequest() { return request_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance() : request_; } /** * required .hadoop.yarn.SchedulingRequestProto request = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder getRequestOrBuilder() { return request_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance() : request_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasReason()) { memoizedIsInitialized = 0; return false; } if (!hasRequest()) { memoizedIsInitialized = 0; return false; } if (!getRequest().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeEnum(1, reason_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getRequest()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(1, reason_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getRequest()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto) obj; if (hasReason() != other.hasReason()) return false; if (hasReason()) { if (reason_ != other.reason_) return false; } if (hasRequest() != other.hasRequest()) return false; if (hasRequest()) { if (!getRequest() .equals(other.getRequest())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasReason()) { hash = (37 * hash) + REASON_FIELD_NUMBER; hash = (53 * hash) + reason_; } if (hasRequest()) { hash = (37 * hash) + REQUEST_FIELD_NUMBER; hash = (53 * hash) + 
getRequest().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws 
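/*
 * The parseDelimitedFrom overloads above pair with writeDelimitedTo for
 * length-prefixed streams of several messages; a sketch (illustrative only):
 *
 *   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
 *   first.writeDelimitedTo(out);
 *   second.writeDelimitedTo(out);
 *   java.io.InputStream in =
 *       new java.io.ByteArrayInputStream(out.toByteArray());
 *   YarnProtos.RejectedSchedulingRequestProto one =
 *       YarnProtos.RejectedSchedulingRequestProto.parseDelimitedFrom(in);
 *   // parseDelimitedFrom returns null once the stream is exhausted.
 */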
java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.RejectedSchedulingRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.RejectedSchedulingRequestProto) org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_RejectedSchedulingRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_RejectedSchedulingRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getRequestFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; reason_ = 1; request_ = null; if (requestBuilder_ != null) { requestBuilder_.dispose(); requestBuilder_ = null; } return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_RejectedSchedulingRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.getDefaultInstance(); } 
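/*
 * Because reason and request are declared "required" in yarn_protos.proto,
 * the build() below throws if either is missing, while buildPartial() hands
 * back the incomplete message; a sketch (illustrative only):
 *
 *   YarnProtos.RejectedSchedulingRequestProto.Builder b =
 *       YarnProtos.RejectedSchedulingRequestProto.newBuilder();
 *   b.isInitialized();  // false: neither required field set
 *   b.buildPartial();   // allowed; result fails isInitialized()
 *   b.build();          // throws an UninitializedMessageException
 */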
@java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto build() { org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto result = new org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.reason_ = reason_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.request_ = requestBuilder_ == null ? request_ : requestBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.getDefaultInstance()) return this; if (other.hasReason()) { setReason(other.getReason()); } if (other.hasRequest()) { mergeRequest(other.getRequest()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasReason()) { return false; } if (!hasRequest()) { return false; } if (!getRequest().isInitialized()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int 
tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(1, tmpRaw); } else { reason_ = tmpRaw; bitField0_ |= 0x00000001; } break; } // case 8 case 18: { input.readMessage( getRequestFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int reason_ = 1; /** * required .hadoop.yarn.RejectionReasonProto reason = 1; * @return Whether the reason field is set. */ @java.lang.Override public boolean hasReason() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.RejectionReasonProto reason = 1; * @return The reason. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto getReason() { org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto result = org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto.forNumber(reason_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto.RRP_COULD_NOT_PLACE_ON_NODE : result; } /** * required .hadoop.yarn.RejectionReasonProto reason = 1; * @param value The reason to set. * @return This builder for chaining. */ public Builder setReason(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; reason_ = value.getNumber(); onChanged(); return this; } /** * required .hadoop.yarn.RejectionReasonProto reason = 1; * @return This builder for chaining. */ public Builder clearReason() { bitField0_ = (bitField0_ & ~0x00000001); reason_ = 1; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto request_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder> requestBuilder_; /** * required .hadoop.yarn.SchedulingRequestProto request = 2; * @return Whether the request field is set. */ public boolean hasRequest() { return ((bitField0_ & 0x00000002) != 0); } /** * required .hadoop.yarn.SchedulingRequestProto request = 2; * @return The request. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getRequest() { if (requestBuilder_ == null) { return request_ == null ? 
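/*
 * Enum fields are held as raw wire numbers (reason_); in mergeFrom above, a
 * number with no matching RejectionReasonProto constant is routed to the
 * unknown-field set via mergeUnknownVarintField rather than dropped, and
 * setReason stores value.getNumber(); usage sketch:
 *
 *   builder.setReason(
 *       YarnProtos.RejectionReasonProto.RRP_COULD_NOT_PLACE_ON_NODE);
 */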
org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance() : request_; } else { return requestBuilder_.getMessage(); } } /** * required .hadoop.yarn.SchedulingRequestProto request = 2; */ public Builder setRequest(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto value) { if (requestBuilder_ == null) { if (value == null) { throw new NullPointerException(); } request_ = value; } else { requestBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * required .hadoop.yarn.SchedulingRequestProto request = 2; */ public Builder setRequest( org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder builderForValue) { if (requestBuilder_ == null) { request_ = builderForValue.build(); } else { requestBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * required .hadoop.yarn.SchedulingRequestProto request = 2; */ public Builder mergeRequest(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto value) { if (requestBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && request_ != null && request_ != org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance()) { getRequestBuilder().mergeFrom(value); } else { request_ = value; } } else { requestBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * required .hadoop.yarn.SchedulingRequestProto request = 2; */ public Builder clearRequest() { bitField0_ = (bitField0_ & ~0x00000002); request_ = null; if (requestBuilder_ != null) { requestBuilder_.dispose(); requestBuilder_ = null; } onChanged(); return this; } /** * required .hadoop.yarn.SchedulingRequestProto request = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder getRequestBuilder() { bitField0_ |= 0x00000002; onChanged(); return getRequestFieldBuilder().getBuilder(); } /** * required .hadoop.yarn.SchedulingRequestProto request = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder getRequestOrBuilder() { if (requestBuilder_ != null) { return requestBuilder_.getMessageOrBuilder(); } else { return request_ == null ? 
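/*
 * mergeRequest above follows the standard protobuf merge rule: if request
 * is unset the argument is adopted wholesale; otherwise the two messages
 * are merged field by field (scalars overwritten, sub-messages merged
 * recursively). Sketch:
 *
 *   b.mergeRequest(reqA);  // field was unset: adopts reqA
 *   b.mergeRequest(reqB);  // field set: reqB merged into the stored value
 */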
org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance() : request_; } } /** * required .hadoop.yarn.SchedulingRequestProto request = 2; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder> getRequestFieldBuilder() { if (requestBuilder_ == null) { requestBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder>( getRequest(), getParentForChildren(), isClean()); request_ = null; } return requestBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.RejectedSchedulingRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.RejectedSchedulingRequestProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public RejectedSchedulingRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface PreemptionMessageProtoOrBuilder extends // 
@@protoc_insertion_point(interface_extends:hadoop.yarn.PreemptionMessageProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /**
   * optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;
   * @return Whether the strictContract field is set.
   */
  boolean hasStrictContract();
  /**
   * optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;
   * @return The strictContract.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto getStrictContract();
  /**
   * optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProtoOrBuilder getStrictContractOrBuilder();

  /**
   * optional .hadoop.yarn.PreemptionContractProto contract = 2;
   * @return Whether the contract field is set.
   */
  boolean hasContract();
  /**
   * optional .hadoop.yarn.PreemptionContractProto contract = 2;
   * @return The contract.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto getContract();
  /**
   * optional .hadoop.yarn.PreemptionContractProto contract = 2;
   */
  org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProtoOrBuilder getContractOrBuilder();
}
/**
 * Protobuf type {@code hadoop.yarn.PreemptionMessageProto}
 */
public static final class PreemptionMessageProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.PreemptionMessageProto)
    PreemptionMessageProtoOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use PreemptionMessageProto.newBuilder() to construct.
  private PreemptionMessageProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private PreemptionMessageProto() {
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new PreemptionMessageProto();
  }

  @java.lang.Override
  public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionMessageProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionMessageProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.Builder.class);
  }

  private int bitField0_;
  public static final int STRICTCONTRACT_FIELD_NUMBER = 1;
  private org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto strictContract_;
  /**
   * optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;
   * @return Whether the strictContract field is set.
   */
  @java.lang.Override
  public boolean hasStrictContract() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;
   * @return The strictContract.
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto getStrictContract() {
    return strictContract_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.getDefaultInstance() : strictContract_; } /** * optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProtoOrBuilder getStrictContractOrBuilder() { return strictContract_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.getDefaultInstance() : strictContract_; } public static final int CONTRACT_FIELD_NUMBER = 2; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto contract_; /** * optional .hadoop.yarn.PreemptionContractProto contract = 2; * @return Whether the contract field is set. */ @java.lang.Override public boolean hasContract() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.PreemptionContractProto contract = 2; * @return The contract. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto getContract() { return contract_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.getDefaultInstance() : contract_; } /** * optional .hadoop.yarn.PreemptionContractProto contract = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProtoOrBuilder getContractOrBuilder() { return contract_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.getDefaultInstance() : contract_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasContract()) { if (!getContract().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getStrictContract()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getContract()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getStrictContract()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getContract()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto) obj; if (hasStrictContract() != other.hasStrictContract()) return false; if (hasStrictContract()) { if (!getStrictContract() .equals(other.getStrictContract())) return false; } if (hasContract() != other.hasContract()) return false; if (hasContract()) { if (!getContract() .equals(other.getContract())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) 
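/*
 * equals() and hashCode() here are value-based: two PreemptionMessageProto
 * instances are equal only if field presence, field contents, and retained
 * unknown fields all match, and hashCode() memoizes its result
 * (memoizedHashCode) because messages are immutable. Sketch:
 *
 *   YarnProtos.PreemptionMessageProto a =
 *       YarnProtos.PreemptionMessageProto.newBuilder().build();
 *   YarnProtos.PreemptionMessageProto b =
 *       YarnProtos.PreemptionMessageProto.newBuilder().build();
 *   a.equals(b);  // true: identical (empty) contents
 */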
return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasStrictContract()) { hash = (37 * hash) + STRICTCONTRACT_FIELD_NUMBER; hash = (53 * hash) + getStrictContract().hashCode(); } if (hasContract()) { hash = (37 * hash) + CONTRACT_FIELD_NUMBER; hash = (53 * hash) + getContract().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.PreemptionMessageProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.PreemptionMessageProto) org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionMessageProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionMessageProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getStrictContractFieldBuilder(); getContractFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; strictContract_ = null; if (strictContractBuilder_ != null) { strictContractBuilder_.dispose(); strictContractBuilder_ = null; } contract_ = null; if (contractBuilder_ != null) { contractBuilder_.dispose(); contractBuilder_ = null; } return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { 
return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionMessageProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto build() { org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto result = new org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.strictContract_ = strictContractBuilder_ == null ? strictContract_ : strictContractBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.contract_ = contractBuilder_ == null ? contract_ : contractBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.getDefaultInstance()) return this; if (other.hasStrictContract()) { mergeStrictContract(other.getStrictContract()); } if (other.hasContract()) { mergeContract(other.getContract()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasContract()) { if (!getContract().isInitialized()) { return false; } } return true; } 
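/*
 * Builder sketch (illustrative; not generated code). Messages are immutable,
 * so instances are assembled through the Builder and frozen with build();
 * toBuilder() copies an existing message for further edits. The
 * mergeFrom(CodedInputStream) overload that follows dispatches on the wire
 * tag, where tag = (field_number << 3) | wire_type: field 1 with wire type 2
 * gives case 10 (strictContract) and field 2 gives case 18 (contract).
 *
 *   YarnProtos.PreemptionMessageProto msg =
 *       YarnProtos.PreemptionMessageProto.newBuilder()
 *           .setStrictContract(
 *               YarnProtos.StrictPreemptionContractProto.getDefaultInstance())
 *           .build();
 *   YarnProtos.PreemptionMessageProto cleared =
 *       msg.toBuilder().clearStrictContract().build();
 */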
@java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getStrictContractFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getContractFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto strictContract_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto, org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProtoOrBuilder> strictContractBuilder_; /** * optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1; * @return Whether the strictContract field is set. */ public boolean hasStrictContract() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1; * @return The strictContract. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto getStrictContract() { if (strictContractBuilder_ == null) { return strictContract_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.getDefaultInstance() : strictContract_; } else { return strictContractBuilder_.getMessage(); } } /** * optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1; */ public Builder setStrictContract(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto value) { if (strictContractBuilder_ == null) { if (value == null) { throw new NullPointerException(); } strictContract_ = value; } else { strictContractBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1; */ public Builder setStrictContract( org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.Builder builderForValue) { if (strictContractBuilder_ == null) { strictContract_ = builderForValue.build(); } else { strictContractBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1; */ public Builder mergeStrictContract(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto value) { if (strictContractBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && strictContract_ != null && strictContract_ != org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.getDefaultInstance()) { getStrictContractBuilder().mergeFrom(value); } else { strictContract_ = value; } } else { strictContractBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1; */ public Builder clearStrictContract() { bitField0_ = (bitField0_ & ~0x00000001); strictContract_ = null; if (strictContractBuilder_ != null) { strictContractBuilder_.dispose(); strictContractBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.Builder getStrictContractBuilder() { bitField0_ |= 0x00000001; onChanged(); return getStrictContractFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProtoOrBuilder getStrictContractOrBuilder() { if (strictContractBuilder_ != null) { return strictContractBuilder_.getMessageOrBuilder(); } else { return strictContract_ == null ? 
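/*
 * Nested-builder sketch (illustrative only). Calling
 * getStrictContractBuilder() hands the field to a SingleFieldBuilderV3: the
 * child builder can then be edited in place and its result is picked up
 * automatically at build() time, while mergeStrictContract() folds a
 * ready-made message into whatever value is already stored.
 *
 *   YarnProtos.PreemptionMessageProto.Builder b =
 *       YarnProtos.PreemptionMessageProto.newBuilder();
 *   b.getStrictContractBuilder();   // marks the field as set
 *   assert b.build().hasStrictContract();
 */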
org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.getDefaultInstance() : strictContract_; } } /** * optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto, org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProtoOrBuilder> getStrictContractFieldBuilder() { if (strictContractBuilder_ == null) { strictContractBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto, org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProtoOrBuilder>( getStrictContract(), getParentForChildren(), isClean()); strictContract_ = null; } return strictContractBuilder_; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto contract_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProtoOrBuilder> contractBuilder_; /** * optional .hadoop.yarn.PreemptionContractProto contract = 2; * @return Whether the contract field is set. */ public boolean hasContract() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.PreemptionContractProto contract = 2; * @return The contract. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto getContract() { if (contractBuilder_ == null) { return contract_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.getDefaultInstance() : contract_; } else { return contractBuilder_.getMessage(); } } /** * optional .hadoop.yarn.PreemptionContractProto contract = 2; */ public Builder setContract(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto value) { if (contractBuilder_ == null) { if (value == null) { throw new NullPointerException(); } contract_ = value; } else { contractBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.PreemptionContractProto contract = 2; */ public Builder setContract( org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.Builder builderForValue) { if (contractBuilder_ == null) { contract_ = builderForValue.build(); } else { contractBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.PreemptionContractProto contract = 2; */ public Builder mergeContract(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto value) { if (contractBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && contract_ != null && contract_ != org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.getDefaultInstance()) { getContractBuilder().mergeFrom(value); } else { contract_ = value; } } else { contractBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.PreemptionContractProto contract = 2; */ public Builder clearContract() { bitField0_ = (bitField0_ & ~0x00000002); contract_ = null; if (contractBuilder_ != null) { contractBuilder_.dispose(); contractBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.PreemptionContractProto contract = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.Builder getContractBuilder() { bitField0_ |= 0x00000002; onChanged(); return getContractFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.PreemptionContractProto contract = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProtoOrBuilder getContractOrBuilder() { if (contractBuilder_ != null) { return contractBuilder_.getMessageOrBuilder(); } else { return contract_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.getDefaultInstance() : contract_; } } /** * optional .hadoop.yarn.PreemptionContractProto contract = 2; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProtoOrBuilder> getContractFieldBuilder() { if (contractBuilder_ == null) { contractBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProtoOrBuilder>( getContract(), getParentForChildren(), isClean()); contract_ = null; } return contractBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.PreemptionMessageProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.PreemptionMessageProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public PreemptionMessageProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface StrictPreemptionContractProtoOrBuilder extends // 
@@protoc_insertion_point(interface_extends:hadoop.yarn.StrictPreemptionContractProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ java.util.List getContainerList(); /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getContainer(int index); /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ int getContainerCount(); /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ java.util.List getContainerOrBuilderList(); /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder getContainerOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.StrictPreemptionContractProto} */ public static final class StrictPreemptionContractProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.StrictPreemptionContractProto) StrictPreemptionContractProtoOrBuilder { private static final long serialVersionUID = 0L; // Use StrictPreemptionContractProto.newBuilder() to construct. private StrictPreemptionContractProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private StrictPreemptionContractProto() { container_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new StrictPreemptionContractProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StrictPreemptionContractProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StrictPreemptionContractProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.Builder.class); } public static final int CONTAINER_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List container_; /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ @java.lang.Override public java.util.List getContainerList() { return container_; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ @java.lang.Override public java.util.List getContainerOrBuilderList() { return container_; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ @java.lang.Override public int getContainerCount() { return container_.size(); } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getContainer(int index) { return container_.get(index); } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ @java.lang.Override public 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder getContainerOrBuilder( int index) { return container_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < container_.size(); i++) { output.writeMessage(1, container_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < container_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, container_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto) obj; if (!getContainerList() .equals(other.getContainerList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getContainerCount() > 0) { hash = (37 * hash) + CONTAINER_FIELD_NUMBER; hash = (53 * hash) + getContainerList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } 
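/*
 * Round-trip sketch (illustrative; not generated code). The parseFrom
 * overloads are the inverse of writeTo()/toByteArray(); the *Delimited*
 * variants add a length prefix so several messages can share one stream, and
 * parser() is the supported replacement for the deprecated PARSER constant.
 *
 *   YarnProtos.StrictPreemptionContractProto contract =
 *       YarnProtos.StrictPreemptionContractProto.newBuilder().build();
 *   byte[] wire = contract.toByteArray();
 *   YarnProtos.StrictPreemptionContractProto parsed =
 *       YarnProtos.StrictPreemptionContractProto.parseFrom(wire);
 *   assert parsed.equals(contract);   // equals() compares field-by-field
 */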
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.StrictPreemptionContractProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.StrictPreemptionContractProto) org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StrictPreemptionContractProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StrictPreemptionContractProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (containerBuilder_ == null) { container_ = java.util.Collections.emptyList(); } else { container_ = null; containerBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StrictPreemptionContractProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto build() { org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto result = new org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto result) { if (containerBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { container_ = java.util.Collections.unmodifiableList(container_); bitField0_ = (bitField0_ & ~0x00000001); } result.container_ = container_; } else { result.container_ = containerBuilder_.build(); } } private void 
buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.getDefaultInstance()) return this; if (containerBuilder_ == null) { if (!other.container_.isEmpty()) { if (container_.isEmpty()) { container_ = other.container_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureContainerIsMutable(); container_.addAll(other.container_); } onChanged(); } } else { if (!other.container_.isEmpty()) { if (containerBuilder_.isEmpty()) { containerBuilder_.dispose(); containerBuilder_ = null; container_ = other.container_; bitField0_ = (bitField0_ & ~0x00000001); containerBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getContainerFieldBuilder() : null; } else { containerBuilder_.addAllMessages(other.container_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.PARSER, extensionRegistry); if (containerBuilder_ == null) { ensureContainerIsMutable(); container_.add(m); } else { containerBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List container_ = java.util.Collections.emptyList(); private void ensureContainerIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { container_ = new java.util.ArrayList(container_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder> containerBuilder_; /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public java.util.List getContainerList() { if (containerBuilder_ == null) { return java.util.Collections.unmodifiableList(container_); } else { return containerBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public int getContainerCount() { if (containerBuilder_ == null) { return container_.size(); } else { return containerBuilder_.getCount(); } } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getContainer(int index) { if (containerBuilder_ == null) { return container_.get(index); } else { return containerBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public Builder setContainer( int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto value) { if (containerBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerIsMutable(); container_.set(index, value); onChanged(); } else { containerBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public Builder setContainer( int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder builderForValue) { if (containerBuilder_ == null) { ensureContainerIsMutable(); container_.set(index, builderForValue.build()); onChanged(); } else { containerBuilder_.setMessage(index, builderForValue.build()); } 
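/*
 * Repeated-field sketch (illustrative only). The container list is mutated
 * through the Builder's add/set/remove/clear methods; getContainerList() on
 * the Builder hands back an unmodifiable view, and addAllContainer() copies
 * any Iterable of messages in bulk.
 *
 *   YarnProtos.StrictPreemptionContractProto.Builder b =
 *       YarnProtos.StrictPreemptionContractProto.newBuilder();
 *   b.addContainer(YarnProtos.PreemptionContainerProto.getDefaultInstance());
 *   b.removeContainer(0);
 *   assert b.getContainerCount() == 0;
 */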
return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public Builder addContainer(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto value) { if (containerBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerIsMutable(); container_.add(value); onChanged(); } else { containerBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public Builder addContainer( int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto value) { if (containerBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerIsMutable(); container_.add(index, value); onChanged(); } else { containerBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public Builder addContainer( org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder builderForValue) { if (containerBuilder_ == null) { ensureContainerIsMutable(); container_.add(builderForValue.build()); onChanged(); } else { containerBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public Builder addContainer( int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder builderForValue) { if (containerBuilder_ == null) { ensureContainerIsMutable(); container_.add(index, builderForValue.build()); onChanged(); } else { containerBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public Builder addAllContainer( java.lang.Iterable values) { if (containerBuilder_ == null) { ensureContainerIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, container_); onChanged(); } else { containerBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public Builder clearContainer() { if (containerBuilder_ == null) { container_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { containerBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public Builder removeContainer(int index) { if (containerBuilder_ == null) { ensureContainerIsMutable(); container_.remove(index); onChanged(); } else { containerBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder getContainerBuilder( int index) { return getContainerFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder getContainerOrBuilder( int index) { if (containerBuilder_ == null) { return container_.get(index); } else { return containerBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public java.util.List getContainerOrBuilderList() { if (containerBuilder_ != null) { return containerBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(container_); } } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder addContainerBuilder() { return getContainerFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder addContainerBuilder( int index) { return getContainerFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 1; */ public java.util.List getContainerBuilderList() { return getContainerFieldBuilder().getBuilderList(); } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder> getContainerFieldBuilder() { if (containerBuilder_ == null) { containerBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder>( container_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); container_ = null; } return containerBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.StrictPreemptionContractProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.StrictPreemptionContractProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public StrictPreemptionContractProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface PreemptionContractProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.PreemptionContractProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ java.util.List getResourceList(); /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto getResource(int index); /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ int getResourceCount(); /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ java.util.List getResourceOrBuilderList(); /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProtoOrBuilder getResourceOrBuilder( int index); /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ java.util.List getContainerList(); /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getContainer(int index); /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ int getContainerCount(); /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ java.util.List getContainerOrBuilderList(); /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder getContainerOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.PreemptionContractProto} */ public static final class PreemptionContractProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.PreemptionContractProto) PreemptionContractProtoOrBuilder { private static final long serialVersionUID = 0L; // Use PreemptionContractProto.newBuilder() to construct. 
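/*
 * Schema sketch (illustrative; the field layout is taken from the accessors
 * below, while the YARN-level meaning is an assumption based on the public
 * preemption API). hadoop.yarn.PreemptionContractProto carries two repeated
 * fields: PreemptionResourceRequestProto resource = 1, roughly the resources
 * the ResourceManager asks the application to release, and
 * PreemptionContainerProto container = 2, the containers it may reclaim if
 * the application does not.
 *
 *   YarnProtos.PreemptionContractProto contract = msg.getContract();
 *   for (int i = 0; i < contract.getResourceCount(); i++) {
 *     YarnProtos.PreemptionResourceRequestProto r = contract.getResource(i);
 *   }
 */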
private PreemptionContractProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private PreemptionContractProto() { resource_ = java.util.Collections.emptyList(); container_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new PreemptionContractProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContractProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContractProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.Builder.class); } public static final int RESOURCE_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List resource_; /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ @java.lang.Override public java.util.List getResourceList() { return resource_; } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ @java.lang.Override public java.util.List getResourceOrBuilderList() { return resource_; } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ @java.lang.Override public int getResourceCount() { return resource_.size(); } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto getResource(int index) { return resource_.get(index); } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProtoOrBuilder getResourceOrBuilder( int index) { return resource_.get(index); } public static final int CONTAINER_FIELD_NUMBER = 2; @SuppressWarnings("serial") private java.util.List container_; /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ @java.lang.Override public java.util.List getContainerList() { return container_; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ @java.lang.Override public java.util.List getContainerOrBuilderList() { return container_; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ @java.lang.Override public int getContainerCount() { return container_.size(); } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getContainer(int index) { return container_.get(index); } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder getContainerOrBuilder( int index) { return container_.get(index); } private byte 
memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getResourceCount(); i++) { if (!getResource(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < resource_.size(); i++) { output.writeMessage(1, resource_.get(i)); } for (int i = 0; i < container_.size(); i++) { output.writeMessage(2, container_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < resource_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, resource_.get(i)); } for (int i = 0; i < container_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, container_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto) obj; if (!getResourceList() .equals(other.getResourceList())) return false; if (!getContainerList() .equals(other.getContainerList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getResourceCount() > 0) { hash = (37 * hash) + RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getResourceList().hashCode(); } if (getContainerCount() > 0) { hash = (37 * hash) + CONTAINER_FIELD_NUMBER; hash = (53 * hash) + getContainerList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.PreemptionContractProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.PreemptionContractProto) org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContractProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContractProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (resourceBuilder_ == null) { resource_ = java.util.Collections.emptyList(); } else { resource_ = null; resourceBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (containerBuilder_ == null) { container_ = java.util.Collections.emptyList(); } else { container_ = null; containerBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContractProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto build() { org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto result = new org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto result) { if (resourceBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { resource_ = java.util.Collections.unmodifiableList(resource_); bitField0_ = (bitField0_ & ~0x00000001); } result.resource_ = resource_; } else { result.resource_ = 
resourceBuilder_.build(); } if (containerBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { container_ = java.util.Collections.unmodifiableList(container_); bitField0_ = (bitField0_ & ~0x00000002); } result.container_ = container_; } else { result.container_ = containerBuilder_.build(); } } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.getDefaultInstance()) return this; if (resourceBuilder_ == null) { if (!other.resource_.isEmpty()) { if (resource_.isEmpty()) { resource_ = other.resource_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureResourceIsMutable(); resource_.addAll(other.resource_); } onChanged(); } } else { if (!other.resource_.isEmpty()) { if (resourceBuilder_.isEmpty()) { resourceBuilder_.dispose(); resourceBuilder_ = null; resource_ = other.resource_; bitField0_ = (bitField0_ & ~0x00000001); resourceBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getResourceFieldBuilder() : null; } else { resourceBuilder_.addAllMessages(other.resource_); } } } if (containerBuilder_ == null) { if (!other.container_.isEmpty()) { if (container_.isEmpty()) { container_ = other.container_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureContainerIsMutable(); container_.addAll(other.container_); } onChanged(); } } else { if (!other.container_.isEmpty()) { if (containerBuilder_.isEmpty()) { containerBuilder_.dispose(); containerBuilder_ = null; container_ = other.container_; bitField0_ = (bitField0_ & ~0x00000002); containerBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getContainerFieldBuilder() : null; } else { containerBuilder_.addAllMessages(other.container_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getResourceCount(); i++) { if (!getResource(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.PARSER, extensionRegistry); if (resourceBuilder_ == null) { ensureResourceIsMutable(); resource_.add(m); } else { resourceBuilder_.addMessage(m); } break; } // case 10 case 18: { org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.PARSER, extensionRegistry); if (containerBuilder_ == null) { ensureContainerIsMutable(); container_.add(m); } else { containerBuilder_.addMessage(m); } break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List resource_ = java.util.Collections.emptyList(); private void ensureResourceIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { resource_ = new java.util.ArrayList(resource_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProtoOrBuilder> resourceBuilder_; /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public java.util.List getResourceList() { if (resourceBuilder_ == null) { return java.util.Collections.unmodifiableList(resource_); } else { return resourceBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public int getResourceCount() { if (resourceBuilder_ == null) { return resource_.size(); } else { return resourceBuilder_.getCount(); } } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto getResource(int index) { if (resourceBuilder_ == null) { return resource_.get(index); } else { return resourceBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public Builder setResource( int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto value) { if (resourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResourceIsMutable(); resource_.set(index, value); 
onChanged(); } else { resourceBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public Builder setResource( int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder builderForValue) { if (resourceBuilder_ == null) { ensureResourceIsMutable(); resource_.set(index, builderForValue.build()); onChanged(); } else { resourceBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public Builder addResource(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto value) { if (resourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResourceIsMutable(); resource_.add(value); onChanged(); } else { resourceBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public Builder addResource( int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto value) { if (resourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResourceIsMutable(); resource_.add(index, value); onChanged(); } else { resourceBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public Builder addResource( org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder builderForValue) { if (resourceBuilder_ == null) { ensureResourceIsMutable(); resource_.add(builderForValue.build()); onChanged(); } else { resourceBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public Builder addResource( int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder builderForValue) { if (resourceBuilder_ == null) { ensureResourceIsMutable(); resource_.add(index, builderForValue.build()); onChanged(); } else { resourceBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public Builder addAllResource( java.lang.Iterable values) { if (resourceBuilder_ == null) { ensureResourceIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, resource_); onChanged(); } else { resourceBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public Builder clearResource() { if (resourceBuilder_ == null) { resource_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { resourceBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public Builder removeResource(int index) { if (resourceBuilder_ == null) { ensureResourceIsMutable(); resource_.remove(index); onChanged(); } else { resourceBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder getResourceBuilder( int index) { return getResourceFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProtoOrBuilder getResourceOrBuilder( int index) { if (resourceBuilder_ == null) { return resource_.get(index); } else { return resourceBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public java.util.List getResourceOrBuilderList() { if (resourceBuilder_ != null) { return resourceBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(resource_); } } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder addResourceBuilder() { return getResourceFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder addResourceBuilder( int index) { return getResourceFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1; */ public java.util.List getResourceBuilderList() { return getResourceFieldBuilder().getBuilderList(); } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProtoOrBuilder> getResourceFieldBuilder() { if (resourceBuilder_ == null) { resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProtoOrBuilder>( resource_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); resource_ = null; } return resourceBuilder_; } private java.util.List container_ = java.util.Collections.emptyList(); private void ensureContainerIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { container_ = new java.util.ArrayList(container_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder> containerBuilder_; /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public java.util.List getContainerList() { if (containerBuilder_ == null) { return java.util.Collections.unmodifiableList(container_); } else { return containerBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public int getContainerCount() { if (containerBuilder_ == null) { return container_.size(); } else { return containerBuilder_.getCount(); } } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getContainer(int index) { if (containerBuilder_ == null) { return 
container_.get(index); } else { return containerBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public Builder setContainer( int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto value) { if (containerBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerIsMutable(); container_.set(index, value); onChanged(); } else { containerBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public Builder setContainer( int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder builderForValue) { if (containerBuilder_ == null) { ensureContainerIsMutable(); container_.set(index, builderForValue.build()); onChanged(); } else { containerBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public Builder addContainer(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto value) { if (containerBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerIsMutable(); container_.add(value); onChanged(); } else { containerBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public Builder addContainer( int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto value) { if (containerBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerIsMutable(); container_.add(index, value); onChanged(); } else { containerBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public Builder addContainer( org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder builderForValue) { if (containerBuilder_ == null) { ensureContainerIsMutable(); container_.add(builderForValue.build()); onChanged(); } else { containerBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public Builder addContainer( int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder builderForValue) { if (containerBuilder_ == null) { ensureContainerIsMutable(); container_.add(index, builderForValue.build()); onChanged(); } else { containerBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public Builder addAllContainer( java.lang.Iterable values) { if (containerBuilder_ == null) { ensureContainerIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, container_); onChanged(); } else { containerBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public Builder clearContainer() { if (containerBuilder_ == null) { container_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { containerBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public Builder removeContainer(int index) { if (containerBuilder_ == null) { ensureContainerIsMutable(); container_.remove(index); onChanged(); } else { containerBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder getContainerBuilder( int index) { return getContainerFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder getContainerOrBuilder( int index) { if (containerBuilder_ == null) { return container_.get(index); } else { return containerBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public java.util.List getContainerOrBuilderList() { if (containerBuilder_ != null) { return containerBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(container_); } } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder addContainerBuilder() { return getContainerFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder addContainerBuilder( int index) { return getContainerFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.PreemptionContainerProto container = 2; */ public java.util.List getContainerBuilderList() { return getContainerFieldBuilder().getBuilderList(); } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder> getContainerFieldBuilder() { if (containerBuilder_ == null) { containerBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder>( container_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); container_ = null; } return containerBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.PreemptionContractProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.PreemptionContractProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser 
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<PreemptionContractProto>() {
      @java.lang.Override
      public PreemptionContractProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<PreemptionContractProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<PreemptionContractProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface PreemptionContainerProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.PreemptionContainerProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional .hadoop.yarn.ContainerIdProto id = 1;
     * @return Whether the id field is set.
     */
    boolean hasId();
    /**
     * optional .hadoop.yarn.ContainerIdProto id = 1;
     * @return The id.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getId();
    /**
     * optional .hadoop.yarn.ContainerIdProto id = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.PreemptionContainerProto}
   */
  public static final class PreemptionContainerProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.PreemptionContainerProto)
      PreemptionContainerProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use PreemptionContainerProto.newBuilder() to construct.
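    // Editorial usage sketch, not part of the generated file: a minimal
    // round-trip through the preemption messages in this section, using only
    // entry points generated here (newBuilder, setId, addContainer, parseFrom)
    // plus the standard protobuf toByteArray(). The ContainerIdProto value
    // ("containerId") is an assumption; its fields are defined elsewhere in
    // YarnProtos and are not shown in this section.
    //
    //   YarnProtos.PreemptionContainerProto container =
    //       YarnProtos.PreemptionContainerProto.newBuilder()
    //           .setId(containerId)        // optional field 1: the container id
    //           .build();
    //   YarnProtos.PreemptionContractProto contract =
    //       YarnProtos.PreemptionContractProto.newBuilder()
    //           .addContainer(container)   // repeated field 2: one entry per container
    //           .build();
    //   byte[] wire = contract.toByteArray();                   // serialize
    //   YarnProtos.PreemptionContractProto parsed =
    //       YarnProtos.PreemptionContractProto.parseFrom(wire); // parse back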
private PreemptionContainerProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private PreemptionContainerProto() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new PreemptionContainerProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContainerProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContainerProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder.class); } private int bitField0_; public static final int ID_FIELD_NUMBER = 1; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto id_; /** * optional .hadoop.yarn.ContainerIdProto id = 1; * @return Whether the id field is set. */ @java.lang.Override public boolean hasId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerIdProto id = 1; * @return The id. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getId() { return id_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : id_; } /** * optional .hadoop.yarn.ContainerIdProto id = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getIdOrBuilder() { return id_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : id_;
    }
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getId());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getId());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto other =
          (org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto) obj;

      if (hasId() != other.hasId()) return false;
      if (hasId()) {
        if (!getId()
            .equals(other.getId())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasId()) {
        hash = (37 * hash) + ID_FIELD_NUMBER;
        hash = (53 * hash) + getId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE ?
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.PreemptionContainerProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.PreemptionContainerProto) org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContainerProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContainerProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; id_ = null; if (idBuilder_ != null) { idBuilder_.dispose(); idBuilder_ = null; } return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContainerProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto build() { org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto result = new org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.id_ = idBuilder_ == null ? 
id_ : idBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.getDefaultInstance()) return this; if (other.hasId()) { mergeId(other.getId()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto id_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> idBuilder_; /** * optional .hadoop.yarn.ContainerIdProto id = 1; * @return Whether the id field is set. */ public boolean hasId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerIdProto id = 1; * @return The id. 
*/ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getId() { if (idBuilder_ == null) { return id_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : id_; } else { return idBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ContainerIdProto id = 1; */ public Builder setId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (idBuilder_ == null) { if (value == null) { throw new NullPointerException(); } id_ = value; } else { idBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto id = 1; */ public Builder setId( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (idBuilder_ == null) { id_ = builderForValue.build(); } else { idBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto id = 1; */ public Builder mergeId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (idBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && id_ != null && id_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) { getIdBuilder().mergeFrom(value); } else { id_ = value; } } else { idBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto id = 1; */ public Builder clearId() { bitField0_ = (bitField0_ & ~0x00000001); id_ = null; if (idBuilder_ != null) { idBuilder_.dispose(); idBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto id = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ContainerIdProto id = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getIdOrBuilder() { if (idBuilder_ != null) { return idBuilder_.getMessageOrBuilder(); } else { return id_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : id_;
        }
      }
      /**
       * optional .hadoop.yarn.ContainerIdProto id = 1;
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>
          getIdFieldBuilder() {
        if (idBuilder_ == null) {
          idBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getId(),
                  getParentForChildren(),
                  isClean());
          id_ = null;
        }
        return idBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.PreemptionContainerProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.PreemptionContainerProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<PreemptionContainerProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<PreemptionContainerProto>() {
      @java.lang.Override
      public PreemptionContainerProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<PreemptionContainerProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<PreemptionContainerProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface PreemptionResourceRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.PreemptionResourceRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional .hadoop.yarn.ResourceRequestProto resource = 1;
     * @return Whether the resource field is set.
     */
    boolean hasResource();
    /**
     * optional .hadoop.yarn.ResourceRequestProto resource = 1;
     * @return The resource.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getResource();
    /**
     * optional .hadoop.yarn.ResourceRequestProto resource = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getResourceOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.PreemptionResourceRequestProto}
   */
  public static final class PreemptionResourceRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.PreemptionResourceRequestProto)
      PreemptionResourceRequestProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use PreemptionResourceRequestProto.newBuilder() to construct.
    private PreemptionResourceRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private PreemptionResourceRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new PreemptionResourceRequestProto();
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionResourceRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionResourceRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int RESOURCE_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto resource_;
    /**
     * optional .hadoop.yarn.ResourceRequestProto resource = 1;
     * @return Whether the resource field is set.
     */
    @java.lang.Override
    public boolean hasResource() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * optional .hadoop.yarn.ResourceRequestProto resource = 1;
     * @return The resource.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getResource() {
      return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance() : resource_;
    }
    /**
     * optional .hadoop.yarn.ResourceRequestProto resource = 1;
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getResourceOrBuilder() {
      return resource_ == null ?
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance() : resource_;
    }
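    // Editorial usage sketch, not part of the generated file: reading the
    // optional "resource" field off a parsed message. hasResource() reflects
    // the presence bit recorded in bitField0_ during parsing; getResource()
    // falls back to the default instance when the field was never set, so the
    // guard below distinguishes "absent" from "present but empty".
    //
    //   YarnProtos.PreemptionResourceRequestProto req = ...;  // parsed elsewhere
    //   if (req.hasResource()) {
    //     YarnProtos.ResourceRequestProto rr = req.getResource();
    //     // inspect rr here
    //   }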
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      if (hasResource()) {
        if (!getResource().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getResource());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getResource());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto other =
          (org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto) obj;

      if (hasResource() != other.hasResource()) return false;
      if (hasResource()) {
        if (!getResource()
            .equals(other.getResource())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasResource()) {
        hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
        hash = (53 * hash) + getResource().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE ?
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.PreemptionResourceRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.PreemptionResourceRequestProto) org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionResourceRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionResourceRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getResourceFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; resource_ = null; if (resourceBuilder_ != null) { resourceBuilder_.dispose(); resourceBuilder_ = null; } return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionResourceRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto build() { org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto result = new org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.resource_ = resourceBuilder_ == null ? 
resource_ : resourceBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.getDefaultInstance()) return this; if (other.hasResource()) { mergeResource(other.getResource()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasResource()) { if (!getResource().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getResourceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto resource_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> resourceBuilder_; /** * optional .hadoop.yarn.ResourceRequestProto resource = 1; * @return Whether the resource field is set. 
*/ public boolean hasResource() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ResourceRequestProto resource = 1; * @return The resource. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getResource() { if (resourceBuilder_ == null) { return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance() : resource_; } else { return resourceBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceRequestProto resource = 1; */ public Builder setResource(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto value) { if (resourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } resource_ = value; } else { resourceBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceRequestProto resource = 1; */ public Builder setResource( org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder builderForValue) { if (resourceBuilder_ == null) { resource_ = builderForValue.build(); } else { resourceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceRequestProto resource = 1; */ public Builder mergeResource(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto value) { if (resourceBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && resource_ != null && resource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance()) { getResourceBuilder().mergeFrom(value); } else { resource_ = value; } } else { resourceBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceRequestProto resource = 1; */ public Builder clearResource() { bitField0_ = (bitField0_ & ~0x00000001); resource_ = null; if (resourceBuilder_ != null) { resourceBuilder_.dispose(); resourceBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ResourceRequestProto resource = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder getResourceBuilder() { bitField0_ |= 0x00000001; onChanged(); return getResourceFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceRequestProto resource = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getResourceOrBuilder() { if (resourceBuilder_ != null) { return resourceBuilder_.getMessageOrBuilder(); } else { return resource_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance() : resource_; } } /** * optional .hadoop.yarn.ResourceRequestProto resource = 1; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> getResourceFieldBuilder() { if (resourceBuilder_ == null) { resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder>( getResource(), getParentForChildren(), isClean()); resource_ = null; } return resourceBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.PreemptionResourceRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.PreemptionResourceRequestProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public PreemptionResourceRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ResourceBlacklistRequestProtoOrBuilder extends // 
@@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceBlacklistRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated string blacklist_additions = 1; * @return A list containing the blacklistAdditions. */ java.util.List getBlacklistAdditionsList(); /** * repeated string blacklist_additions = 1; * @return The count of blacklistAdditions. */ int getBlacklistAdditionsCount(); /** * repeated string blacklist_additions = 1; * @param index The index of the element to return. * @return The blacklistAdditions at the given index. */ java.lang.String getBlacklistAdditions(int index); /** * repeated string blacklist_additions = 1; * @param index The index of the value to return. * @return The bytes of the blacklistAdditions at the given index. */ org.apache.hadoop.thirdparty.protobuf.ByteString getBlacklistAdditionsBytes(int index); /** * repeated string blacklist_removals = 2; * @return A list containing the blacklistRemovals. */ java.util.List getBlacklistRemovalsList(); /** * repeated string blacklist_removals = 2; * @return The count of blacklistRemovals. */ int getBlacklistRemovalsCount(); /** * repeated string blacklist_removals = 2; * @param index The index of the element to return. * @return The blacklistRemovals at the given index. */ java.lang.String getBlacklistRemovals(int index); /** * repeated string blacklist_removals = 2; * @param index The index of the value to return. * @return The bytes of the blacklistRemovals at the given index. */ org.apache.hadoop.thirdparty.protobuf.ByteString getBlacklistRemovalsBytes(int index); } /** * Protobuf type {@code hadoop.yarn.ResourceBlacklistRequestProto} */ public static final class ResourceBlacklistRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceBlacklistRequestProto) ResourceBlacklistRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ResourceBlacklistRequestProto.newBuilder() to construct. 
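  // Usage sketch (illustrative, not part of the generated source): a
  // client-side blacklist update adds nodes the scheduler should avoid and
  // removes nodes that have recovered. The host names below are
  // hypothetical placeholders.
  //
  //   YarnProtos.ResourceBlacklistRequestProto blacklistRequest =
  //       YarnProtos.ResourceBlacklistRequestProto.newBuilder()
  //           .addBlacklistAdditions("bad-node-01.example.com")
  //           .addAllBlacklistAdditions(
  //               java.util.Arrays.asList("bad-node-02.example.com"))
  //           .addBlacklistRemovals("recovered-node-03.example.com")
  //           .build();
  //
  // The repeated-string accessors generated below
  // (getBlacklistAdditionsList(), getBlacklistRemovalsCount(), ...) then
  // read those values back from the immutable message.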
private ResourceBlacklistRequestProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ResourceBlacklistRequestProto() { blacklistAdditions_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; blacklistRemovals_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ResourceBlacklistRequestProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceBlacklistRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceBlacklistRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.Builder.class); } public static final int BLACKLIST_ADDITIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList blacklistAdditions_; /** * repeated string blacklist_additions = 1; * @return A list containing the blacklistAdditions. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getBlacklistAdditionsList() { return blacklistAdditions_; } /** * repeated string blacklist_additions = 1; * @return The count of blacklistAdditions. */ public int getBlacklistAdditionsCount() { return blacklistAdditions_.size(); } /** * repeated string blacklist_additions = 1; * @param index The index of the element to return. * @return The blacklistAdditions at the given index. */ public java.lang.String getBlacklistAdditions(int index) { return blacklistAdditions_.get(index); } /** * repeated string blacklist_additions = 1; * @param index The index of the value to return. * @return The bytes of the blacklistAdditions at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getBlacklistAdditionsBytes(int index) { return blacklistAdditions_.getByteString(index); } public static final int BLACKLIST_REMOVALS_FIELD_NUMBER = 2; @SuppressWarnings("serial") private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList blacklistRemovals_; /** * repeated string blacklist_removals = 2; * @return A list containing the blacklistRemovals. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getBlacklistRemovalsList() { return blacklistRemovals_; } /** * repeated string blacklist_removals = 2; * @return The count of blacklistRemovals. */ public int getBlacklistRemovalsCount() { return blacklistRemovals_.size(); } /** * repeated string blacklist_removals = 2; * @param index The index of the element to return. * @return The blacklistRemovals at the given index. 
*/ public java.lang.String getBlacklistRemovals(int index) { return blacklistRemovals_.get(index); } /** * repeated string blacklist_removals = 2; * @param index The index of the value to return. * @return The bytes of the blacklistRemovals at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getBlacklistRemovalsBytes(int index) { return blacklistRemovals_.getByteString(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < blacklistAdditions_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, blacklistAdditions_.getRaw(i)); } for (int i = 0; i < blacklistRemovals_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, blacklistRemovals_.getRaw(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < blacklistAdditions_.size(); i++) { dataSize += computeStringSizeNoTag(blacklistAdditions_.getRaw(i)); } size += dataSize; size += 1 * getBlacklistAdditionsList().size(); } { int dataSize = 0; for (int i = 0; i < blacklistRemovals_.size(); i++) { dataSize += computeStringSizeNoTag(blacklistRemovals_.getRaw(i)); } size += dataSize; size += 1 * getBlacklistRemovalsList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto) obj; if (!getBlacklistAdditionsList() .equals(other.getBlacklistAdditionsList())) return false; if (!getBlacklistRemovalsList() .equals(other.getBlacklistRemovalsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getBlacklistAdditionsCount() > 0) { hash = (37 * hash) + BLACKLIST_ADDITIONS_FIELD_NUMBER; hash = (53 * hash) + getBlacklistAdditionsList().hashCode(); } if (getBlacklistRemovalsCount() > 0) { hash = (37 * hash) + BLACKLIST_REMOVALS_FIELD_NUMBER; hash = (53 * hash) + getBlacklistRemovalsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom( java.nio.ByteBuffer data, 
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } 
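  // Round-trip sketch (illustrative, not generated code): each parseFrom
  // overload above pairs with a write-side method inherited from the
  // protobuf runtime. parseDelimitedFrom expects the varint length prefix
  // produced by writeDelimitedTo, so those two must be used together on a
  // stream; plain parseFrom pairs with writeTo/toByteArray. Assuming a
  // ResourceBlacklistRequestProto instance named blacklistRequest:
  //
  //   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
  //   blacklistRequest.writeDelimitedTo(out);   // length-prefixed frame
  //   YarnProtos.ResourceBlacklistRequestProto parsed =
  //       YarnProtos.ResourceBlacklistRequestProto.parseDelimitedFrom(
  //           new java.io.ByteArrayInputStream(out.toByteArray()));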
@java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ResourceBlacklistRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceBlacklistRequestProto) org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceBlacklistRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceBlacklistRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; blacklistAdditions_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); blacklistRemovals_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceBlacklistRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { 
buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto result) { if (((bitField0_ & 0x00000001) != 0)) { blacklistAdditions_ = blacklistAdditions_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000001); } result.blacklistAdditions_ = blacklistAdditions_; if (((bitField0_ & 0x00000002) != 0)) { blacklistRemovals_ = blacklistRemovals_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000002); } result.blacklistRemovals_ = blacklistRemovals_; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.getDefaultInstance()) return this; if (!other.blacklistAdditions_.isEmpty()) { if (blacklistAdditions_.isEmpty()) { blacklistAdditions_ = other.blacklistAdditions_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureBlacklistAdditionsIsMutable(); blacklistAdditions_.addAll(other.blacklistAdditions_); } onChanged(); } if (!other.blacklistRemovals_.isEmpty()) { if (blacklistRemovals_.isEmpty()) { blacklistRemovals_ = other.blacklistRemovals_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureBlacklistRemovalsIsMutable(); blacklistRemovals_.addAll(other.blacklistRemovals_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { 
org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); ensureBlacklistAdditionsIsMutable(); blacklistAdditions_.add(bs); break; } // case 10 case 18: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); ensureBlacklistRemovalsIsMutable(); blacklistRemovals_.add(bs); break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList blacklistAdditions_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; private void ensureBlacklistAdditionsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { blacklistAdditions_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(blacklistAdditions_); bitField0_ |= 0x00000001; } } /** * repeated string blacklist_additions = 1; * @return A list containing the blacklistAdditions. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getBlacklistAdditionsList() { return blacklistAdditions_.getUnmodifiableView(); } /** * repeated string blacklist_additions = 1; * @return The count of blacklistAdditions. */ public int getBlacklistAdditionsCount() { return blacklistAdditions_.size(); } /** * repeated string blacklist_additions = 1; * @param index The index of the element to return. * @return The blacklistAdditions at the given index. */ public java.lang.String getBlacklistAdditions(int index) { return blacklistAdditions_.get(index); } /** * repeated string blacklist_additions = 1; * @param index The index of the value to return. * @return The bytes of the blacklistAdditions at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getBlacklistAdditionsBytes(int index) { return blacklistAdditions_.getByteString(index); } /** * repeated string blacklist_additions = 1; * @param index The index to set the value at. * @param value The blacklistAdditions to set. * @return This builder for chaining. */ public Builder setBlacklistAdditions( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureBlacklistAdditionsIsMutable(); blacklistAdditions_.set(index, value); onChanged(); return this; } /** * repeated string blacklist_additions = 1; * @param value The blacklistAdditions to add. * @return This builder for chaining. */ public Builder addBlacklistAdditions( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureBlacklistAdditionsIsMutable(); blacklistAdditions_.add(value); onChanged(); return this; } /** * repeated string blacklist_additions = 1; * @param values The blacklistAdditions to add. * @return This builder for chaining. */ public Builder addAllBlacklistAdditions( java.lang.Iterable values) { ensureBlacklistAdditionsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, blacklistAdditions_); onChanged(); return this; } /** * repeated string blacklist_additions = 1; * @return This builder for chaining. 
*/ public Builder clearBlacklistAdditions() { blacklistAdditions_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * repeated string blacklist_additions = 1; * @param value The bytes of the blacklistAdditions to add. * @return This builder for chaining. */ public Builder addBlacklistAdditionsBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureBlacklistAdditionsIsMutable(); blacklistAdditions_.add(value); onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList blacklistRemovals_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; private void ensureBlacklistRemovalsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { blacklistRemovals_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(blacklistRemovals_); bitField0_ |= 0x00000002; } } /** * repeated string blacklist_removals = 2; * @return A list containing the blacklistRemovals. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getBlacklistRemovalsList() { return blacklistRemovals_.getUnmodifiableView(); } /** * repeated string blacklist_removals = 2; * @return The count of blacklistRemovals. */ public int getBlacklistRemovalsCount() { return blacklistRemovals_.size(); } /** * repeated string blacklist_removals = 2; * @param index The index of the element to return. * @return The blacklistRemovals at the given index. */ public java.lang.String getBlacklistRemovals(int index) { return blacklistRemovals_.get(index); } /** * repeated string blacklist_removals = 2; * @param index The index of the value to return. * @return The bytes of the blacklistRemovals at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getBlacklistRemovalsBytes(int index) { return blacklistRemovals_.getByteString(index); } /** * repeated string blacklist_removals = 2; * @param index The index to set the value at. * @param value The blacklistRemovals to set. * @return This builder for chaining. */ public Builder setBlacklistRemovals( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureBlacklistRemovalsIsMutable(); blacklistRemovals_.set(index, value); onChanged(); return this; } /** * repeated string blacklist_removals = 2; * @param value The blacklistRemovals to add. * @return This builder for chaining. */ public Builder addBlacklistRemovals( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureBlacklistRemovalsIsMutable(); blacklistRemovals_.add(value); onChanged(); return this; } /** * repeated string blacklist_removals = 2; * @param values The blacklistRemovals to add. * @return This builder for chaining. */ public Builder addAllBlacklistRemovals( java.lang.Iterable values) { ensureBlacklistRemovalsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, blacklistRemovals_); onChanged(); return this; } /** * repeated string blacklist_removals = 2; * @return This builder for chaining. */ public Builder clearBlacklistRemovals() { blacklistRemovals_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * repeated string blacklist_removals = 2; * @param value The bytes of the blacklistRemovals to add. 
* @return This builder for chaining. */ public Builder addBlacklistRemovalsBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureBlacklistRemovalsIsMutable(); blacklistRemovals_.add(value); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceBlacklistRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceBlacklistRequestProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ResourceBlacklistRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ApplicationSubmissionContextProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationSubmissionContextProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; * @return Whether the applicationId field is set. */ boolean hasApplicationId(); /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; * @return The applicationId. 
*/ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId(); /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder(); /** * optional string application_name = 2 [default = "N/A"]; * @return Whether the applicationName field is set. */ boolean hasApplicationName(); /** * optional string application_name = 2 [default = "N/A"]; * @return The applicationName. */ java.lang.String getApplicationName(); /** * optional string application_name = 2 [default = "N/A"]; * @return The bytes for applicationName. */ org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationNameBytes(); /** * optional string queue = 3 [default = "default"]; * @return Whether the queue field is set. */ boolean hasQueue(); /** * optional string queue = 3 [default = "default"]; * @return The queue. */ java.lang.String getQueue(); /** * optional string queue = 3 [default = "default"]; * @return The bytes for queue. */ org.apache.hadoop.thirdparty.protobuf.ByteString getQueueBytes(); /** * optional .hadoop.yarn.PriorityProto priority = 4; * @return Whether the priority field is set. */ boolean hasPriority(); /** * optional .hadoop.yarn.PriorityProto priority = 4; * @return The priority. */ org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority(); /** * optional .hadoop.yarn.PriorityProto priority = 4; */ org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder(); /** * optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5; * @return Whether the amContainerSpec field is set. */ boolean hasAmContainerSpec(); /** * optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5; * @return The amContainerSpec. */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getAmContainerSpec(); /** * optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5; */ org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getAmContainerSpecOrBuilder(); /** * optional bool cancel_tokens_when_complete = 6 [default = true]; * @return Whether the cancelTokensWhenComplete field is set. */ boolean hasCancelTokensWhenComplete(); /** * optional bool cancel_tokens_when_complete = 6 [default = true]; * @return The cancelTokensWhenComplete. */ boolean getCancelTokensWhenComplete(); /** * optional bool unmanaged_am = 7 [default = false]; * @return Whether the unmanagedAm field is set. */ boolean hasUnmanagedAm(); /** * optional bool unmanaged_am = 7 [default = false]; * @return The unmanagedAm. */ boolean getUnmanagedAm(); /** * optional int32 maxAppAttempts = 8 [default = 0]; * @return Whether the maxAppAttempts field is set. */ boolean hasMaxAppAttempts(); /** * optional int32 maxAppAttempts = 8 [default = 0]; * @return The maxAppAttempts. */ int getMaxAppAttempts(); /** * optional .hadoop.yarn.ResourceProto resource = 9; * @return Whether the resource field is set. */ boolean hasResource(); /** * optional .hadoop.yarn.ResourceProto resource = 9; * @return The resource. */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource(); /** * optional .hadoop.yarn.ResourceProto resource = 9; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder(); /** * optional string applicationType = 10 [default = "YARN"]; * @return Whether the applicationType field is set. 
*/ boolean hasApplicationType(); /** * optional string applicationType = 10 [default = "YARN"]; * @return The applicationType. */ java.lang.String getApplicationType(); /** * optional string applicationType = 10 [default = "YARN"]; * @return The bytes for applicationType. */ org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationTypeBytes(); /** * optional bool keep_containers_across_application_attempts = 11 [default = false]; * @return Whether the keepContainersAcrossApplicationAttempts field is set. */ boolean hasKeepContainersAcrossApplicationAttempts(); /** * optional bool keep_containers_across_application_attempts = 11 [default = false]; * @return The keepContainersAcrossApplicationAttempts. */ boolean getKeepContainersAcrossApplicationAttempts(); /** * repeated string applicationTags = 12; * @return A list containing the applicationTags. */ java.util.List getApplicationTagsList(); /** * repeated string applicationTags = 12; * @return The count of applicationTags. */ int getApplicationTagsCount(); /** * repeated string applicationTags = 12; * @param index The index of the element to return. * @return The applicationTags at the given index. */ java.lang.String getApplicationTags(int index); /** * repeated string applicationTags = 12; * @param index The index of the value to return. * @return The bytes of the applicationTags at the given index. */ org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationTagsBytes(int index); /** * optional int64 attempt_failures_validity_interval = 13 [default = -1]; * @return Whether the attemptFailuresValidityInterval field is set. */ boolean hasAttemptFailuresValidityInterval(); /** * optional int64 attempt_failures_validity_interval = 13 [default = -1]; * @return The attemptFailuresValidityInterval. */ long getAttemptFailuresValidityInterval(); /** * optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14; * @return Whether the logAggregationContext field is set. */ boolean hasLogAggregationContext(); /** * optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14; * @return The logAggregationContext. */ org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getLogAggregationContext(); /** * optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14; */ org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder getLogAggregationContextOrBuilder(); /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 15; * @return Whether the reservationId field is set. */ boolean hasReservationId(); /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 15; * @return The reservationId. */ org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId(); /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 15; */ org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder(); /** * optional string node_label_expression = 16; * @return Whether the nodeLabelExpression field is set. */ boolean hasNodeLabelExpression(); /** * optional string node_label_expression = 16; * @return The nodeLabelExpression. */ java.lang.String getNodeLabelExpression(); /** * optional string node_label_expression = 16; * @return The bytes for nodeLabelExpression. 
*/ org.apache.hadoop.thirdparty.protobuf.ByteString getNodeLabelExpressionBytes(); /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ java.util.List getAmContainerResourceRequestList(); /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getAmContainerResourceRequest(int index); /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ int getAmContainerResourceRequestCount(); /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ java.util.List getAmContainerResourceRequestOrBuilderList(); /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getAmContainerResourceRequestOrBuilder( int index); /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ java.util.List getApplicationTimeoutsList(); /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto getApplicationTimeouts(int index); /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ int getApplicationTimeoutsCount(); /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ java.util.List getApplicationTimeoutsOrBuilderList(); /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder( int index); /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ java.util.List getApplicationSchedulingPropertiesList(); /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getApplicationSchedulingProperties(int index); /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ int getApplicationSchedulingPropertiesCount(); /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ java.util.List getApplicationSchedulingPropertiesOrBuilderList(); /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getApplicationSchedulingPropertiesOrBuilder( int index); } /** *
   * //////////////////////////////////////////////////////////////////////
   * //// From client_RM_Protocol /////////////////////////////////////////
   * //////////////////////////////////////////////////////////////////////
   * 
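   * The accessors below expose the proto2 field defaults declared in
   * yarn_protos.proto: application_name = "N/A", queue = "default",
   * applicationType = "YARN", cancel_tokens_when_complete = true,
   * attempt_failures_validity_interval = -1. A minimal submission context
   * might be built as follows (an illustrative sketch; the id values and
   * application name are placeholders, and the nested setters are assumed
   * from the standard protobuf builder pattern):
   *
   *   YarnProtos.ApplicationSubmissionContextProto ctx =
   *       YarnProtos.ApplicationSubmissionContextProto.newBuilder()
   *           .setApplicationId(YarnProtos.ApplicationIdProto.newBuilder()
   *               .setClusterTimestamp(1580000000000L)
   *               .setId(1))
   *           .setApplicationName("sleep-job")
   *           .setMaxAppAttempts(2)
   *           .build();
   *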
* * Protobuf type {@code hadoop.yarn.ApplicationSubmissionContextProto} */ public static final class ApplicationSubmissionContextProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationSubmissionContextProto) ApplicationSubmissionContextProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ApplicationSubmissionContextProto.newBuilder() to construct. private ApplicationSubmissionContextProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ApplicationSubmissionContextProto() { applicationName_ = "N/A"; queue_ = "default"; cancelTokensWhenComplete_ = true; applicationType_ = "YARN"; applicationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; attemptFailuresValidityInterval_ = -1L; nodeLabelExpression_ = ""; amContainerResourceRequest_ = java.util.Collections.emptyList(); applicationTimeouts_ = java.util.Collections.emptyList(); applicationSchedulingProperties_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ApplicationSubmissionContextProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationSubmissionContextProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationSubmissionContextProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder.class); } private int bitField0_; public static final int APPLICATION_ID_FIELD_NUMBER = 1; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; * @return Whether the applicationId field is set. */ @java.lang.Override public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; * @return The applicationId. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { return applicationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } public static final int APPLICATION_NAME_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object applicationName_ = "N/A"; /** * optional string application_name = 2 [default = "N/A"]; * @return Whether the applicationName field is set. */ @java.lang.Override public boolean hasApplicationName() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string application_name = 2 [default = "N/A"]; * @return The applicationName. */ @java.lang.Override public java.lang.String getApplicationName() { java.lang.Object ref = applicationName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { applicationName_ = s; } return s; } } /** * optional string application_name = 2 [default = "N/A"]; * @return The bytes for applicationName. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationNameBytes() { java.lang.Object ref = applicationName_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); applicationName_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int QUEUE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object queue_ = "default"; /** * optional string queue = 3 [default = "default"]; * @return Whether the queue field is set. */ @java.lang.Override public boolean hasQueue() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string queue = 3 [default = "default"]; * @return The queue. */ @java.lang.Override public java.lang.String getQueue() { java.lang.Object ref = queue_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { queue_ = s; } return s; } } /** * optional string queue = 3 [default = "default"]; * @return The bytes for queue. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getQueueBytes() { java.lang.Object ref = queue_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); queue_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int PRIORITY_FIELD_NUMBER = 4; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_; /** * optional .hadoop.yarn.PriorityProto priority = 4; * @return Whether the priority field is set. */ @java.lang.Override public boolean hasPriority() { return ((bitField0_ & 0x00000008) != 0); } /** * optional .hadoop.yarn.PriorityProto priority = 4; * @return The priority. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() { return priority_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } /** * optional .hadoop.yarn.PriorityProto priority = 4; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() { return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } public static final int AM_CONTAINER_SPEC_FIELD_NUMBER = 5; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto amContainerSpec_; /** * optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5; * @return Whether the amContainerSpec field is set. */ @java.lang.Override public boolean hasAmContainerSpec() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5; * @return The amContainerSpec. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getAmContainerSpec() { return amContainerSpec_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : amContainerSpec_; } /** * optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getAmContainerSpecOrBuilder() { return amContainerSpec_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : amContainerSpec_; } public static final int CANCEL_TOKENS_WHEN_COMPLETE_FIELD_NUMBER = 6; private boolean cancelTokensWhenComplete_ = true; /** * optional bool cancel_tokens_when_complete = 6 [default = true]; * @return Whether the cancelTokensWhenComplete field is set. */ @java.lang.Override public boolean hasCancelTokensWhenComplete() { return ((bitField0_ & 0x00000020) != 0); } /** * optional bool cancel_tokens_when_complete = 6 [default = true]; * @return The cancelTokensWhenComplete. */ @java.lang.Override public boolean getCancelTokensWhenComplete() { return cancelTokensWhenComplete_; } public static final int UNMANAGED_AM_FIELD_NUMBER = 7; private boolean unmanagedAm_ = false; /** * optional bool unmanaged_am = 7 [default = false]; * @return Whether the unmanagedAm field is set. */ @java.lang.Override public boolean hasUnmanagedAm() { return ((bitField0_ & 0x00000040) != 0); } /** * optional bool unmanaged_am = 7 [default = false]; * @return The unmanagedAm. */ @java.lang.Override public boolean getUnmanagedAm() { return unmanagedAm_; } public static final int MAXAPPATTEMPTS_FIELD_NUMBER = 8; private int maxAppAttempts_ = 0; /** * optional int32 maxAppAttempts = 8 [default = 0]; * @return Whether the maxAppAttempts field is set. */ @java.lang.Override public boolean hasMaxAppAttempts() { return ((bitField0_ & 0x00000080) != 0); } /** * optional int32 maxAppAttempts = 8 [default = 0]; * @return The maxAppAttempts. */ @java.lang.Override public int getMaxAppAttempts() { return maxAppAttempts_; } public static final int RESOURCE_FIELD_NUMBER = 9; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_; /** * optional .hadoop.yarn.ResourceProto resource = 9; * @return Whether the resource field is set. */ @java.lang.Override public boolean hasResource() { return ((bitField0_ & 0x00000100) != 0); } /** * optional .hadoop.yarn.ResourceProto resource = 9; * @return The resource. 
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() { return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_; } /** * optional .hadoop.yarn.ResourceProto resource = 9; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() { return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_; } public static final int APPLICATIONTYPE_FIELD_NUMBER = 10; @SuppressWarnings("serial") private volatile java.lang.Object applicationType_ = "YARN"; /** * optional string applicationType = 10 [default = "YARN"]; * @return Whether the applicationType field is set. */ @java.lang.Override public boolean hasApplicationType() { return ((bitField0_ & 0x00000200) != 0); } /** * optional string applicationType = 10 [default = "YARN"]; * @return The applicationType. */ @java.lang.Override public java.lang.String getApplicationType() { java.lang.Object ref = applicationType_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { applicationType_ = s; } return s; } } /** * optional string applicationType = 10 [default = "YARN"]; * @return The bytes for applicationType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationTypeBytes() { java.lang.Object ref = applicationType_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); applicationType_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int KEEP_CONTAINERS_ACROSS_APPLICATION_ATTEMPTS_FIELD_NUMBER = 11; private boolean keepContainersAcrossApplicationAttempts_ = false; /** * optional bool keep_containers_across_application_attempts = 11 [default = false]; * @return Whether the keepContainersAcrossApplicationAttempts field is set. */ @java.lang.Override public boolean hasKeepContainersAcrossApplicationAttempts() { return ((bitField0_ & 0x00000400) != 0); } /** * optional bool keep_containers_across_application_attempts = 11 [default = false]; * @return The keepContainersAcrossApplicationAttempts. */ @java.lang.Override public boolean getKeepContainersAcrossApplicationAttempts() { return keepContainersAcrossApplicationAttempts_; } public static final int APPLICATIONTAGS_FIELD_NUMBER = 12; @SuppressWarnings("serial") private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList applicationTags_; /** * repeated string applicationTags = 12; * @return A list containing the applicationTags. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getApplicationTagsList() { return applicationTags_; } /** * repeated string applicationTags = 12; * @return The count of applicationTags. */ public int getApplicationTagsCount() { return applicationTags_.size(); } /** * repeated string applicationTags = 12; * @param index The index of the element to return. * @return The applicationTags at the given index. 
*/ public java.lang.String getApplicationTags(int index) { return applicationTags_.get(index); } /** * repeated string applicationTags = 12; * @param index The index of the value to return. * @return The bytes of the applicationTags at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationTagsBytes(int index) { return applicationTags_.getByteString(index); } public static final int ATTEMPT_FAILURES_VALIDITY_INTERVAL_FIELD_NUMBER = 13; private long attemptFailuresValidityInterval_ = -1L; /** * optional int64 attempt_failures_validity_interval = 13 [default = -1]; * @return Whether the attemptFailuresValidityInterval field is set. */ @java.lang.Override public boolean hasAttemptFailuresValidityInterval() { return ((bitField0_ & 0x00000800) != 0); } /** * optional int64 attempt_failures_validity_interval = 13 [default = -1]; * @return The attemptFailuresValidityInterval. */ @java.lang.Override public long getAttemptFailuresValidityInterval() { return attemptFailuresValidityInterval_; } public static final int LOG_AGGREGATION_CONTEXT_FIELD_NUMBER = 14; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto logAggregationContext_; /** * optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14; * @return Whether the logAggregationContext field is set. */ @java.lang.Override public boolean hasLogAggregationContext() { return ((bitField0_ & 0x00001000) != 0); } /** * optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14; * @return The logAggregationContext. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getLogAggregationContext() { return logAggregationContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance() : logAggregationContext_; } /** * optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder getLogAggregationContextOrBuilder() { return logAggregationContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance() : logAggregationContext_; } public static final int RESERVATION_ID_FIELD_NUMBER = 15; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_; /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 15; * @return Whether the reservationId field is set. */ @java.lang.Override public boolean hasReservationId() { return ((bitField0_ & 0x00002000) != 0); } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 15; * @return The reservationId. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() { return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 15; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() { return reservationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } public static final int NODE_LABEL_EXPRESSION_FIELD_NUMBER = 16; @SuppressWarnings("serial") private volatile java.lang.Object nodeLabelExpression_ = ""; /** * optional string node_label_expression = 16; * @return Whether the nodeLabelExpression field is set. */ @java.lang.Override public boolean hasNodeLabelExpression() { return ((bitField0_ & 0x00004000) != 0); } /** * optional string node_label_expression = 16; * @return The nodeLabelExpression. */ @java.lang.Override public java.lang.String getNodeLabelExpression() { java.lang.Object ref = nodeLabelExpression_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { nodeLabelExpression_ = s; } return s; } } /** * optional string node_label_expression = 16; * @return The bytes for nodeLabelExpression. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getNodeLabelExpressionBytes() { java.lang.Object ref = nodeLabelExpression_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); nodeLabelExpression_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int AM_CONTAINER_RESOURCE_REQUEST_FIELD_NUMBER = 17; @SuppressWarnings("serial") private java.util.List amContainerResourceRequest_; /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ @java.lang.Override public java.util.List getAmContainerResourceRequestList() { return amContainerResourceRequest_; } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ @java.lang.Override public java.util.List getAmContainerResourceRequestOrBuilderList() { return amContainerResourceRequest_; } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ @java.lang.Override public int getAmContainerResourceRequestCount() { return amContainerResourceRequest_.size(); } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getAmContainerResourceRequest(int index) { return amContainerResourceRequest_.get(index); } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getAmContainerResourceRequestOrBuilder( int index) { return amContainerResourceRequest_.get(index); } public static final int APPLICATION_TIMEOUTS_FIELD_NUMBER = 18; @SuppressWarnings("serial") private java.util.List applicationTimeouts_; /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ @java.lang.Override public java.util.List getApplicationTimeoutsList() { return applicationTimeouts_; } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ @java.lang.Override public java.util.List getApplicationTimeoutsOrBuilderList() { return applicationTimeouts_; } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto 
application_timeouts = 18; */ @java.lang.Override public int getApplicationTimeoutsCount() { return applicationTimeouts_.size(); } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto getApplicationTimeouts(int index) { return applicationTimeouts_.get(index); } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder( int index) { return applicationTimeouts_.get(index); } public static final int APPLICATION_SCHEDULING_PROPERTIES_FIELD_NUMBER = 19; @SuppressWarnings("serial") private java.util.List applicationSchedulingProperties_; /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ @java.lang.Override public java.util.List getApplicationSchedulingPropertiesList() { return applicationSchedulingProperties_; } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ @java.lang.Override public java.util.List getApplicationSchedulingPropertiesOrBuilderList() { return applicationSchedulingProperties_; } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ @java.lang.Override public int getApplicationSchedulingPropertiesCount() { return applicationSchedulingProperties_.size(); } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getApplicationSchedulingProperties(int index) { return applicationSchedulingProperties_.get(index); } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getApplicationSchedulingPropertiesOrBuilder( int index) { return applicationSchedulingProperties_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasResource()) { if (!getResource().isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getAmContainerResourceRequestCount(); i++) { if (!getAmContainerResourceRequest(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApplicationId()); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, applicationName_); } if (((bitField0_ & 0x00000004) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, queue_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeMessage(4, getPriority()); } if (((bitField0_ & 0x00000010) != 0)) { output.writeMessage(5, getAmContainerSpec()); } if (((bitField0_ & 0x00000020) != 0)) { output.writeBool(6, cancelTokensWhenComplete_); } if (((bitField0_ & 0x00000040) != 0)) { output.writeBool(7, 
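    // writeTo emits fields in field-number order; each write* call prefixes the
    // value with a varint tag of (field_number << 3 | wire_type). For example,
    // this bool field 7 carries tag 7 << 3 | 0 = 56 -- the same constant the
    // builder's parse loop matches in its switch further below.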
unmanagedAm_); } if (((bitField0_ & 0x00000080) != 0)) { output.writeInt32(8, maxAppAttempts_); } if (((bitField0_ & 0x00000100) != 0)) { output.writeMessage(9, getResource()); } if (((bitField0_ & 0x00000200) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 10, applicationType_); } if (((bitField0_ & 0x00000400) != 0)) { output.writeBool(11, keepContainersAcrossApplicationAttempts_); } for (int i = 0; i < applicationTags_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 12, applicationTags_.getRaw(i)); } if (((bitField0_ & 0x00000800) != 0)) { output.writeInt64(13, attemptFailuresValidityInterval_); } if (((bitField0_ & 0x00001000) != 0)) { output.writeMessage(14, getLogAggregationContext()); } if (((bitField0_ & 0x00002000) != 0)) { output.writeMessage(15, getReservationId()); } if (((bitField0_ & 0x00004000) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 16, nodeLabelExpression_); } for (int i = 0; i < amContainerResourceRequest_.size(); i++) { output.writeMessage(17, amContainerResourceRequest_.get(i)); } for (int i = 0; i < applicationTimeouts_.size(); i++) { output.writeMessage(18, applicationTimeouts_.get(i)); } for (int i = 0; i < applicationSchedulingProperties_.size(); i++) { output.writeMessage(19, applicationSchedulingProperties_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getApplicationId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, applicationName_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, queue_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(4, getPriority()); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(5, getAmContainerSpec()); } if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(6, cancelTokensWhenComplete_); } if (((bitField0_ & 0x00000040) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(7, unmanagedAm_); } if (((bitField0_ & 0x00000080) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(8, maxAppAttempts_); } if (((bitField0_ & 0x00000100) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(9, getResource()); } if (((bitField0_ & 0x00000200) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(10, applicationType_); } if (((bitField0_ & 0x00000400) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(11, keepContainersAcrossApplicationAttempts_); } { int dataSize = 0; for (int i = 0; i < applicationTags_.size(); i++) { dataSize += computeStringSizeNoTag(applicationTags_.getRaw(i)); } size += dataSize; size += 1 * getApplicationTagsList().size(); } if (((bitField0_ & 0x00000800) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(13, attemptFailuresValidityInterval_); } if 
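    // getSerializedSize walks the same fields as writeTo, summing tag plus
    // payload lengths (the repeated-string block above adds one tag byte per
    // applicationTags entry), and memoizes the total in memoizedSize so the
    // size of an immutable message is computed at most once.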
(((bitField0_ & 0x00001000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(14, getLogAggregationContext()); } if (((bitField0_ & 0x00002000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(15, getReservationId()); } if (((bitField0_ & 0x00004000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(16, nodeLabelExpression_); } for (int i = 0; i < amContainerResourceRequest_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(17, amContainerResourceRequest_.get(i)); } for (int i = 0; i < applicationTimeouts_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(18, applicationTimeouts_.get(i)); } for (int i = 0; i < applicationSchedulingProperties_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(19, applicationSchedulingProperties_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto) obj; if (hasApplicationId() != other.hasApplicationId()) return false; if (hasApplicationId()) { if (!getApplicationId() .equals(other.getApplicationId())) return false; } if (hasApplicationName() != other.hasApplicationName()) return false; if (hasApplicationName()) { if (!getApplicationName() .equals(other.getApplicationName())) return false; } if (hasQueue() != other.hasQueue()) return false; if (hasQueue()) { if (!getQueue() .equals(other.getQueue())) return false; } if (hasPriority() != other.hasPriority()) return false; if (hasPriority()) { if (!getPriority() .equals(other.getPriority())) return false; } if (hasAmContainerSpec() != other.hasAmContainerSpec()) return false; if (hasAmContainerSpec()) { if (!getAmContainerSpec() .equals(other.getAmContainerSpec())) return false; } if (hasCancelTokensWhenComplete() != other.hasCancelTokensWhenComplete()) return false; if (hasCancelTokensWhenComplete()) { if (getCancelTokensWhenComplete() != other.getCancelTokensWhenComplete()) return false; } if (hasUnmanagedAm() != other.hasUnmanagedAm()) return false; if (hasUnmanagedAm()) { if (getUnmanagedAm() != other.getUnmanagedAm()) return false; } if (hasMaxAppAttempts() != other.hasMaxAppAttempts()) return false; if (hasMaxAppAttempts()) { if (getMaxAppAttempts() != other.getMaxAppAttempts()) return false; } if (hasResource() != other.hasResource()) return false; if (hasResource()) { if (!getResource() .equals(other.getResource())) return false; } if (hasApplicationType() != other.hasApplicationType()) return false; if (hasApplicationType()) { if (!getApplicationType() .equals(other.getApplicationType())) return false; } if (hasKeepContainersAcrossApplicationAttempts() != other.hasKeepContainersAcrossApplicationAttempts()) return false; if (hasKeepContainersAcrossApplicationAttempts()) { if (getKeepContainersAcrossApplicationAttempts() != other.getKeepContainersAcrossApplicationAttempts()) return false; } if (!getApplicationTagsList() .equals(other.getApplicationTagsList())) return false; if 
(hasAttemptFailuresValidityInterval() != other.hasAttemptFailuresValidityInterval()) return false; if (hasAttemptFailuresValidityInterval()) { if (getAttemptFailuresValidityInterval() != other.getAttemptFailuresValidityInterval()) return false; } if (hasLogAggregationContext() != other.hasLogAggregationContext()) return false; if (hasLogAggregationContext()) { if (!getLogAggregationContext() .equals(other.getLogAggregationContext())) return false; } if (hasReservationId() != other.hasReservationId()) return false; if (hasReservationId()) { if (!getReservationId() .equals(other.getReservationId())) return false; } if (hasNodeLabelExpression() != other.hasNodeLabelExpression()) return false; if (hasNodeLabelExpression()) { if (!getNodeLabelExpression() .equals(other.getNodeLabelExpression())) return false; } if (!getAmContainerResourceRequestList() .equals(other.getAmContainerResourceRequestList())) return false; if (!getApplicationTimeoutsList() .equals(other.getApplicationTimeoutsList())) return false; if (!getApplicationSchedulingPropertiesList() .equals(other.getApplicationSchedulingPropertiesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationId()) { hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER; hash = (53 * hash) + getApplicationId().hashCode(); } if (hasApplicationName()) { hash = (37 * hash) + APPLICATION_NAME_FIELD_NUMBER; hash = (53 * hash) + getApplicationName().hashCode(); } if (hasQueue()) { hash = (37 * hash) + QUEUE_FIELD_NUMBER; hash = (53 * hash) + getQueue().hashCode(); } if (hasPriority()) { hash = (37 * hash) + PRIORITY_FIELD_NUMBER; hash = (53 * hash) + getPriority().hashCode(); } if (hasAmContainerSpec()) { hash = (37 * hash) + AM_CONTAINER_SPEC_FIELD_NUMBER; hash = (53 * hash) + getAmContainerSpec().hashCode(); } if (hasCancelTokensWhenComplete()) { hash = (37 * hash) + CANCEL_TOKENS_WHEN_COMPLETE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getCancelTokensWhenComplete()); } if (hasUnmanagedAm()) { hash = (37 * hash) + UNMANAGED_AM_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getUnmanagedAm()); } if (hasMaxAppAttempts()) { hash = (37 * hash) + MAXAPPATTEMPTS_FIELD_NUMBER; hash = (53 * hash) + getMaxAppAttempts(); } if (hasResource()) { hash = (37 * hash) + RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getResource().hashCode(); } if (hasApplicationType()) { hash = (37 * hash) + APPLICATIONTYPE_FIELD_NUMBER; hash = (53 * hash) + getApplicationType().hashCode(); } if (hasKeepContainersAcrossApplicationAttempts()) { hash = (37 * hash) + KEEP_CONTAINERS_ACROSS_APPLICATION_ATTEMPTS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getKeepContainersAcrossApplicationAttempts()); } if (getApplicationTagsCount() > 0) { hash = (37 * hash) + APPLICATIONTAGS_FIELD_NUMBER; hash = (53 * hash) + getApplicationTagsList().hashCode(); } if (hasAttemptFailuresValidityInterval()) { hash = (37 * hash) + ATTEMPT_FAILURES_VALIDITY_INTERVAL_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getAttemptFailuresValidityInterval()); } if (hasLogAggregationContext()) { hash = (37 * hash) + LOG_AGGREGATION_CONTEXT_FIELD_NUMBER; hash = (53 * hash) + 
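    // hashCode() folds every present field into the running hash with the
    // generated 37/53 prime mixing, keyed by field number so equal messages
    // hash alike, and caches the result in memoizedHashCode (0 means "not yet
    // computed").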
getLogAggregationContext().hashCode(); } if (hasReservationId()) { hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER; hash = (53 * hash) + getReservationId().hashCode(); } if (hasNodeLabelExpression()) { hash = (37 * hash) + NODE_LABEL_EXPRESSION_FIELD_NUMBER; hash = (53 * hash) + getNodeLabelExpression().hashCode(); } if (getAmContainerResourceRequestCount() > 0) { hash = (37 * hash) + AM_CONTAINER_RESOURCE_REQUEST_FIELD_NUMBER; hash = (53 * hash) + getAmContainerResourceRequestList().hashCode(); } if (getApplicationTimeoutsCount() > 0) { hash = (37 * hash) + APPLICATION_TIMEOUTS_FIELD_NUMBER; hash = (53 * hash) + getApplicationTimeoutsList().hashCode(); } if (getApplicationSchedulingPropertiesCount() > 0) { hash = (37 * hash) + APPLICATION_SCHEDULING_PROPERTIES_FIELD_NUMBER; hash = (53 * hash) + getApplicationSchedulingPropertiesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
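    // Illustrative round trip through these parseFrom overloads; the builder
    // and field names exist in this class, the variable names are examples:
    //
    //   YarnProtos.ApplicationSubmissionContextProto ctx =
    //       YarnProtos.ApplicationSubmissionContextProto.newBuilder()
    //           .setApplicationName("my-app")
    //           .setQueue("default")
    //           .build();
    //   byte[] wire = ctx.toByteArray();
    //   YarnProtos.ApplicationSubmissionContextProto back =
    //       YarnProtos.ApplicationSubmissionContextProto.parseFrom(wire);
    //
    // parseDelimitedFrom below differs from parseFrom in that it first reads a
    // varint length prefix, so several messages can share one InputStream.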
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** *
     * <pre>
     *//////////////////////////////////////////////////////////////////////
     * //// From client_RM_Protocol /////////////////////////////////////////
     * //////////////////////////////////////////////////////////////////////
     * </pre>
* * Protobuf type {@code hadoop.yarn.ApplicationSubmissionContextProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationSubmissionContextProto) org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationSubmissionContextProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationSubmissionContextProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getApplicationIdFieldBuilder(); getPriorityFieldBuilder(); getAmContainerSpecFieldBuilder(); getResourceFieldBuilder(); getLogAggregationContextFieldBuilder(); getReservationIdFieldBuilder(); getAmContainerResourceRequestFieldBuilder(); getApplicationTimeoutsFieldBuilder(); getApplicationSchedulingPropertiesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; applicationId_ = null; if (applicationIdBuilder_ != null) { applicationIdBuilder_.dispose(); applicationIdBuilder_ = null; } applicationName_ = "N/A"; queue_ = "default"; priority_ = null; if (priorityBuilder_ != null) { priorityBuilder_.dispose(); priorityBuilder_ = null; } amContainerSpec_ = null; if (amContainerSpecBuilder_ != null) { amContainerSpecBuilder_.dispose(); amContainerSpecBuilder_ = null; } cancelTokensWhenComplete_ = true; unmanagedAm_ = false; maxAppAttempts_ = 0; resource_ = null; if (resourceBuilder_ != null) { resourceBuilder_.dispose(); resourceBuilder_ = null; } applicationType_ = "YARN"; keepContainersAcrossApplicationAttempts_ = false; applicationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000800); attemptFailuresValidityInterval_ = -1L; logAggregationContext_ = null; if (logAggregationContextBuilder_ != null) { logAggregationContextBuilder_.dispose(); logAggregationContextBuilder_ = null; } reservationId_ = null; if (reservationIdBuilder_ != null) { reservationIdBuilder_.dispose(); reservationIdBuilder_ = null; } nodeLabelExpression_ = ""; if (amContainerResourceRequestBuilder_ == null) { amContainerResourceRequest_ = java.util.Collections.emptyList(); } else { amContainerResourceRequest_ = null; amContainerResourceRequestBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00010000); if (applicationTimeoutsBuilder_ == null) { applicationTimeouts_ = java.util.Collections.emptyList(); } else { applicationTimeouts_ = null; 
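        // clear() restores every proto default ("N/A", "default", true, -1L, ...)
        // and disposes any nested-message field builders; for repeated fields it
        // either empties the local list or clears the repeated-field builder,
        // whichever currently owns the data (as here for applicationTimeouts).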
applicationTimeoutsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00020000); if (applicationSchedulingPropertiesBuilder_ == null) { applicationSchedulingProperties_ = java.util.Collections.emptyList(); } else { applicationSchedulingProperties_ = null; applicationSchedulingPropertiesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00040000); return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationSubmissionContextProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto result) { if (((bitField0_ & 0x00000800) != 0)) { applicationTags_ = applicationTags_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000800); } result.applicationTags_ = applicationTags_; if (amContainerResourceRequestBuilder_ == null) { if (((bitField0_ & 0x00010000) != 0)) { amContainerResourceRequest_ = java.util.Collections.unmodifiableList(amContainerResourceRequest_); bitField0_ = (bitField0_ & ~0x00010000); } result.amContainerResourceRequest_ = amContainerResourceRequest_; } else { result.amContainerResourceRequest_ = amContainerResourceRequestBuilder_.build(); } if (applicationTimeoutsBuilder_ == null) { if (((bitField0_ & 0x00020000) != 0)) { applicationTimeouts_ = java.util.Collections.unmodifiableList(applicationTimeouts_); bitField0_ = (bitField0_ & ~0x00020000); } result.applicationTimeouts_ = applicationTimeouts_; } else { result.applicationTimeouts_ = applicationTimeoutsBuilder_.build(); } if (applicationSchedulingPropertiesBuilder_ == null) { if (((bitField0_ & 0x00040000) != 0)) { applicationSchedulingProperties_ = java.util.Collections.unmodifiableList(applicationSchedulingProperties_); bitField0_ = (bitField0_ & ~0x00040000); } result.applicationSchedulingProperties_ = applicationSchedulingProperties_; } else { result.applicationSchedulingProperties_ = applicationSchedulingPropertiesBuilder_.build(); } } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.applicationId_ = applicationIdBuilder_ == null ? 
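      // buildPartial0 copies each set field into the result, preferring a field
      // builder's built message over the raw field when one exists, and
      // re-derives the result's presence mask (to_bitField0_) from the builder's.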
applicationId_ : applicationIdBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.applicationName_ = applicationName_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.queue_ = queue_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.priority_ = priorityBuilder_ == null ? priority_ : priorityBuilder_.build(); to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.amContainerSpec_ = amContainerSpecBuilder_ == null ? amContainerSpec_ : amContainerSpecBuilder_.build(); to_bitField0_ |= 0x00000010; } if (((from_bitField0_ & 0x00000020) != 0)) { result.cancelTokensWhenComplete_ = cancelTokensWhenComplete_; to_bitField0_ |= 0x00000020; } if (((from_bitField0_ & 0x00000040) != 0)) { result.unmanagedAm_ = unmanagedAm_; to_bitField0_ |= 0x00000040; } if (((from_bitField0_ & 0x00000080) != 0)) { result.maxAppAttempts_ = maxAppAttempts_; to_bitField0_ |= 0x00000080; } if (((from_bitField0_ & 0x00000100) != 0)) { result.resource_ = resourceBuilder_ == null ? resource_ : resourceBuilder_.build(); to_bitField0_ |= 0x00000100; } if (((from_bitField0_ & 0x00000200) != 0)) { result.applicationType_ = applicationType_; to_bitField0_ |= 0x00000200; } if (((from_bitField0_ & 0x00000400) != 0)) { result.keepContainersAcrossApplicationAttempts_ = keepContainersAcrossApplicationAttempts_; to_bitField0_ |= 0x00000400; } if (((from_bitField0_ & 0x00001000) != 0)) { result.attemptFailuresValidityInterval_ = attemptFailuresValidityInterval_; to_bitField0_ |= 0x00000800; } if (((from_bitField0_ & 0x00002000) != 0)) { result.logAggregationContext_ = logAggregationContextBuilder_ == null ? logAggregationContext_ : logAggregationContextBuilder_.build(); to_bitField0_ |= 0x00001000; } if (((from_bitField0_ & 0x00004000) != 0)) { result.reservationId_ = reservationIdBuilder_ == null ? 
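      // Note the builder and message bit layouts diverge from here on: the
      // builder spends bit 0x00000800 on the repeated applicationTags list, so
      // builder bit 0x00001000 (attempt_failures_validity_interval) maps to
      // message bit 0x00000800, and each later singular field shifts down one bit.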
reservationId_ : reservationIdBuilder_.build(); to_bitField0_ |= 0x00002000; } if (((from_bitField0_ & 0x00008000) != 0)) { result.nodeLabelExpression_ = nodeLabelExpression_; to_bitField0_ |= 0x00004000; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance()) return this; if (other.hasApplicationId()) { mergeApplicationId(other.getApplicationId()); } if (other.hasApplicationName()) { applicationName_ = other.applicationName_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasQueue()) { queue_ = other.queue_; bitField0_ |= 0x00000004; onChanged(); } if (other.hasPriority()) { mergePriority(other.getPriority()); } if (other.hasAmContainerSpec()) { mergeAmContainerSpec(other.getAmContainerSpec()); } if (other.hasCancelTokensWhenComplete()) { setCancelTokensWhenComplete(other.getCancelTokensWhenComplete()); } if (other.hasUnmanagedAm()) { setUnmanagedAm(other.getUnmanagedAm()); } if (other.hasMaxAppAttempts()) { setMaxAppAttempts(other.getMaxAppAttempts()); } if (other.hasResource()) { mergeResource(other.getResource()); } if (other.hasApplicationType()) { applicationType_ = other.applicationType_; bitField0_ |= 0x00000200; onChanged(); } if (other.hasKeepContainersAcrossApplicationAttempts()) { setKeepContainersAcrossApplicationAttempts(other.getKeepContainersAcrossApplicationAttempts()); } if (!other.applicationTags_.isEmpty()) { if (applicationTags_.isEmpty()) { applicationTags_ = other.applicationTags_; bitField0_ = (bitField0_ & ~0x00000800); } else { ensureApplicationTagsIsMutable(); applicationTags_.addAll(other.applicationTags_); } onChanged(); } if (other.hasAttemptFailuresValidityInterval()) { setAttemptFailuresValidityInterval(other.getAttemptFailuresValidityInterval()); } if (other.hasLogAggregationContext()) { mergeLogAggregationContext(other.getLogAggregationContext()); } if (other.hasReservationId()) { mergeReservationId(other.getReservationId()); } if (other.hasNodeLabelExpression()) { 
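      // mergeFrom(other) follows standard protobuf merge semantics: set scalars
      // and strings overwrite, nested messages merge field-by-field via the
      // merge*() helpers, and repeated fields are appended -- with the wrinkle
      // below that once a repeated-field builder exists, new elements must go
      // through it rather than the backing list.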
nodeLabelExpression_ = other.nodeLabelExpression_; bitField0_ |= 0x00008000; onChanged(); } if (amContainerResourceRequestBuilder_ == null) { if (!other.amContainerResourceRequest_.isEmpty()) { if (amContainerResourceRequest_.isEmpty()) { amContainerResourceRequest_ = other.amContainerResourceRequest_; bitField0_ = (bitField0_ & ~0x00010000); } else { ensureAmContainerResourceRequestIsMutable(); amContainerResourceRequest_.addAll(other.amContainerResourceRequest_); } onChanged(); } } else { if (!other.amContainerResourceRequest_.isEmpty()) { if (amContainerResourceRequestBuilder_.isEmpty()) { amContainerResourceRequestBuilder_.dispose(); amContainerResourceRequestBuilder_ = null; amContainerResourceRequest_ = other.amContainerResourceRequest_; bitField0_ = (bitField0_ & ~0x00010000); amContainerResourceRequestBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getAmContainerResourceRequestFieldBuilder() : null; } else { amContainerResourceRequestBuilder_.addAllMessages(other.amContainerResourceRequest_); } } } if (applicationTimeoutsBuilder_ == null) { if (!other.applicationTimeouts_.isEmpty()) { if (applicationTimeouts_.isEmpty()) { applicationTimeouts_ = other.applicationTimeouts_; bitField0_ = (bitField0_ & ~0x00020000); } else { ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.addAll(other.applicationTimeouts_); } onChanged(); } } else { if (!other.applicationTimeouts_.isEmpty()) { if (applicationTimeoutsBuilder_.isEmpty()) { applicationTimeoutsBuilder_.dispose(); applicationTimeoutsBuilder_ = null; applicationTimeouts_ = other.applicationTimeouts_; bitField0_ = (bitField0_ & ~0x00020000); applicationTimeoutsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getApplicationTimeoutsFieldBuilder() : null; } else { applicationTimeoutsBuilder_.addAllMessages(other.applicationTimeouts_); } } } if (applicationSchedulingPropertiesBuilder_ == null) { if (!other.applicationSchedulingProperties_.isEmpty()) { if (applicationSchedulingProperties_.isEmpty()) { applicationSchedulingProperties_ = other.applicationSchedulingProperties_; bitField0_ = (bitField0_ & ~0x00040000); } else { ensureApplicationSchedulingPropertiesIsMutable(); applicationSchedulingProperties_.addAll(other.applicationSchedulingProperties_); } onChanged(); } } else { if (!other.applicationSchedulingProperties_.isEmpty()) { if (applicationSchedulingPropertiesBuilder_.isEmpty()) { applicationSchedulingPropertiesBuilder_.dispose(); applicationSchedulingPropertiesBuilder_ = null; applicationSchedulingProperties_ = other.applicationSchedulingProperties_; bitField0_ = (bitField0_ & ~0x00040000); applicationSchedulingPropertiesBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getApplicationSchedulingPropertiesFieldBuilder() : null; } else { applicationSchedulingPropertiesBuilder_.addAllMessages(other.applicationSchedulingProperties_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasResource()) { if (!getResource().isInitialized()) { return false; } } for (int i = 0; i < getAmContainerResourceRequestCount(); i++) { if (!getAmContainerResourceRequest(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getApplicationIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { applicationName_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { queue_ = input.readBytes(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { input.readMessage( getPriorityFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000008; break; } // case 34 case 42: { input.readMessage( getAmContainerSpecFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000010; break; } // case 42 case 48: { cancelTokensWhenComplete_ = input.readBool(); bitField0_ |= 0x00000020; break; } // case 48 case 56: { unmanagedAm_ = input.readBool(); bitField0_ |= 0x00000040; break; } // case 56 case 64: { maxAppAttempts_ = input.readInt32(); bitField0_ |= 0x00000080; break; } // case 64 case 74: { input.readMessage( getResourceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000100; break; } // case 74 case 82: { applicationType_ = input.readBytes(); bitField0_ |= 0x00000200; break; } // case 82 case 88: { keepContainersAcrossApplicationAttempts_ = input.readBool(); bitField0_ |= 0x00000400; break; } // case 88 case 98: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); ensureApplicationTagsIsMutable(); applicationTags_.add(bs); break; } // case 98 case 104: { attemptFailuresValidityInterval_ = input.readInt64(); bitField0_ |= 0x00001000; break; } // case 104 case 114: { input.readMessage( getLogAggregationContextFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00002000; break; } // case 114 case 122: { input.readMessage( getReservationIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00004000; break; } // case 122 case 130: { nodeLabelExpression_ = input.readBytes(); bitField0_ |= 0x00008000; break; } // case 130 case 138: { org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.PARSER, extensionRegistry); if (amContainerResourceRequestBuilder_ == null) { ensureAmContainerResourceRequestIsMutable(); amContainerResourceRequest_.add(m); } else { amContainerResourceRequestBuilder_.addMessage(m); } break; } // case 138 case 146: { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.PARSER, extensionRegistry); if (applicationTimeoutsBuilder_ == null) { ensureApplicationTimeoutsIsMutable(); 
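              // Each case label in this parse loop is the field's wire tag
              // (field_number << 3 | wire_type): case 146 here is field 18 with
              // wire type 2 (length-delimited). Unmatched tags fall through to
              // parseUnknownField, which preserves them for reserialization.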
applicationTimeouts_.add(m); } else { applicationTimeoutsBuilder_.addMessage(m); } break; } // case 146 case 154: { org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.PARSER, extensionRegistry); if (applicationSchedulingPropertiesBuilder_ == null) { ensureApplicationSchedulingPropertiesIsMutable(); applicationSchedulingProperties_.add(m); } else { applicationSchedulingPropertiesBuilder_.addMessage(m); } break; } // case 154 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_; /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; * @return Whether the applicationId field is set. */ public boolean hasApplicationId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; * @return The applicationId. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() { if (applicationIdBuilder_ == null) { return applicationId_ == null ? 
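      // Until getApplicationIdFieldBuilder() is first called, the builder keeps
      // the plain applicationId_ message; afterwards a SingleFieldBuilderV3 owns
      // the field and keeps parent and child builders in sync, which is why
      // every accessor branches on applicationIdBuilder_ == null.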
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } else { return applicationIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder setApplicationId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } applicationId_ = value; } else { applicationIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder setApplicationId( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) { if (applicationIdBuilder_ == null) { applicationId_ = builderForValue.build(); } else { applicationIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder mergeApplicationId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) { if (applicationIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && applicationId_ != null && applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) { getApplicationIdBuilder().mergeFrom(value); } else { applicationId_ = value; } } else { applicationIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public Builder clearApplicationId() { bitField0_ = (bitField0_ & ~0x00000001); applicationId_ = null; if (applicationIdBuilder_ != null) { applicationIdBuilder_.dispose(); applicationIdBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApplicationIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() { if (applicationIdBuilder_ != null) { return applicationIdBuilder_.getMessageOrBuilder(); } else { return applicationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_; } } /** * optional .hadoop.yarn.ApplicationIdProto application_id = 1; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> getApplicationIdFieldBuilder() { if (applicationIdBuilder_ == null) { applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>( getApplicationId(), getParentForChildren(), isClean()); applicationId_ = null; } return applicationIdBuilder_; } private java.lang.Object applicationName_ = "N/A"; /** * optional string application_name = 2 [default = "N/A"]; * @return Whether the applicationName field is set. */ public boolean hasApplicationName() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string application_name = 2 [default = "N/A"]; * @return The applicationName. */ public java.lang.String getApplicationName() { java.lang.Object ref = applicationName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { applicationName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string application_name = 2 [default = "N/A"]; * @return The bytes for applicationName. */ public org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationNameBytes() { java.lang.Object ref = applicationName_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); applicationName_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string application_name = 2 [default = "N/A"]; * @param value The applicationName to set. * @return This builder for chaining. */ public Builder setApplicationName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } applicationName_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional string application_name = 2 [default = "N/A"]; * @return This builder for chaining. */ public Builder clearApplicationName() { applicationName_ = getDefaultInstance().getApplicationName(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * optional string application_name = 2 [default = "N/A"]; * @param value The bytes for applicationName to set. * @return This builder for chaining. */ public Builder setApplicationNameBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } applicationName_ = value; bitField0_ |= 0x00000002; onChanged(); return this; }
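/*
 * Usage sketch for the string fields with proto defaults (application_name = "N/A",
 * queue = "default" below). The enclosing message name does not appear in this
 * excerpt, so the builder variable b is a hypothetical instance of this Builder:
 *
 *   b.getApplicationName();          // "N/A" while the 0x00000002 has-bit is clear
 *   b.setApplicationName("wordcount");
 *   b.hasApplicationName();          // true
 *   b.clearApplicationName();        // restores "N/A" via getDefaultInstance()
 */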
private java.lang.Object queue_ = "default"; /** * optional string queue = 3 [default = "default"]; * @return Whether the queue field is set. */ public boolean hasQueue() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string queue = 3 [default = "default"]; * @return The queue. */ public java.lang.String getQueue() { java.lang.Object ref = queue_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { queue_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string queue = 3 [default = "default"]; * @return The bytes for queue. */ public org.apache.hadoop.thirdparty.protobuf.ByteString getQueueBytes() { java.lang.Object ref = queue_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); queue_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string queue = 3 [default = "default"]; * @param value The queue to set. * @return This builder for chaining. */ public Builder setQueue( java.lang.String value) { if (value == null) { throw new NullPointerException(); } queue_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional string queue = 3 [default = "default"]; * @return This builder for chaining. */ public Builder clearQueue() { queue_ = getDefaultInstance().getQueue(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * optional string queue = 3 [default = "default"]; * @param value The bytes for queue to set. * @return This builder for chaining. */ public Builder setQueueBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } queue_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_; /** * optional .hadoop.yarn.PriorityProto priority = 4; * @return Whether the priority field is set. */ public boolean hasPriority() { return ((bitField0_ & 0x00000008) != 0); } /** * optional .hadoop.yarn.PriorityProto priority = 4; * @return The priority. */ public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() { if (priorityBuilder_ == null) { return priority_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } else { return priorityBuilder_.getMessage(); } } /** * optional .hadoop.yarn.PriorityProto priority = 4; */ public Builder setPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) { if (priorityBuilder_ == null) { if (value == null) { throw new NullPointerException(); } priority_ = value; } else { priorityBuilder_.setMessage(value); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 4; */ public Builder setPriority( org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) { if (priorityBuilder_ == null) { priority_ = builderForValue.build(); } else { priorityBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 4; */ public Builder mergePriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) { if (priorityBuilder_ == null) { if (((bitField0_ & 0x00000008) != 0) && priority_ != null && priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) { getPriorityBuilder().mergeFrom(value); } else { priority_ = value; } } else { priorityBuilder_.mergeFrom(value); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 4; */ public Builder clearPriority() { bitField0_ = (bitField0_ & ~0x00000008); priority_ = null; if (priorityBuilder_ != null) { priorityBuilder_.dispose(); priorityBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.PriorityProto priority = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() { bitField0_ |= 0x00000008; onChanged(); return getPriorityFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.PriorityProto priority = 4; */ public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() { if (priorityBuilder_ != null) { return priorityBuilder_.getMessageOrBuilder(); } else { return priority_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } } /** * optional .hadoop.yarn.PriorityProto priority = 4; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> getPriorityFieldBuilder() { if (priorityBuilder_ == null) { priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>( getPriority(), getParentForChildren(), isClean()); priority_ = null; } return priorityBuilder_; } private org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto amContainerSpec_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder> amContainerSpecBuilder_; /** * optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5; * @return Whether the amContainerSpec field is set. */ public boolean hasAmContainerSpec() { return ((bitField0_ & 0x00000010) != 0); }
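/*
 * Sketch of the single-message-field pattern used by fields 1, 4, 5, 9, 14 and 15:
 * the value lives in the plain field (amContainerSpec_ here) until the field builder
 * is first requested, at which point a SingleFieldBuilderV3 takes ownership and the
 * plain field is nulled. Only accessors visible in this file are used; b is a
 * hypothetical instance of this Builder:
 *
 *   b.hasAmContainerSpec();          // false, bit 0x00000010 clear
 *   b.getAmContainerSpecBuilder();   // lazily allocates the field builder, sets the bit
 *   b.clearAmContainerSpec();        // disposes the builder and clears the bit
 */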
/** * optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5; * @return The amContainerSpec. */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getAmContainerSpec() { if (amContainerSpecBuilder_ == null) { return amContainerSpec_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : amContainerSpec_; } else { return amContainerSpecBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5; */ public Builder setAmContainerSpec(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto value) { if (amContainerSpecBuilder_ == null) { if (value == null) { throw new NullPointerException(); } amContainerSpec_ = value; } else { amContainerSpecBuilder_.setMessage(value); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5; */ public Builder setAmContainerSpec( org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder builderForValue) { if (amContainerSpecBuilder_ == null) { amContainerSpec_ = builderForValue.build(); } else { amContainerSpecBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5; */ public Builder mergeAmContainerSpec(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto value) { if (amContainerSpecBuilder_ == null) { if (((bitField0_ & 0x00000010) != 0) && amContainerSpec_ != null && amContainerSpec_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance()) { getAmContainerSpecBuilder().mergeFrom(value); } else { amContainerSpec_ = value; } } else { amContainerSpecBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5; */ public Builder clearAmContainerSpec() { bitField0_ = (bitField0_ & ~0x00000010); amContainerSpec_ = null; if (amContainerSpecBuilder_ != null) { amContainerSpecBuilder_.dispose(); amContainerSpecBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder getAmContainerSpecBuilder() { bitField0_ |= 0x00000010; onChanged(); return getAmContainerSpecFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5; */ public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getAmContainerSpecOrBuilder() { if (amContainerSpecBuilder_ != null) { return amContainerSpecBuilder_.getMessageOrBuilder(); } else { return amContainerSpec_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : amContainerSpec_; } } /** * optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder> getAmContainerSpecFieldBuilder() { if (amContainerSpecBuilder_ == null) { amContainerSpecBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder>( getAmContainerSpec(), getParentForChildren(), isClean()); amContainerSpec_ = null; } return amContainerSpecBuilder_; } private boolean cancelTokensWhenComplete_ = true; /** * optional bool cancel_tokens_when_complete = 6 [default = true]; * @return Whether the cancelTokensWhenComplete field is set. */ @java.lang.Override public boolean hasCancelTokensWhenComplete() { return ((bitField0_ & 0x00000020) != 0); } /** * optional bool cancel_tokens_when_complete = 6 [default = true]; * @return The cancelTokensWhenComplete. */ @java.lang.Override public boolean getCancelTokensWhenComplete() { return cancelTokensWhenComplete_; } /** * optional bool cancel_tokens_when_complete = 6 [default = true]; * @param value The cancelTokensWhenComplete to set. * @return This builder for chaining. */ public Builder setCancelTokensWhenComplete(boolean value) { cancelTokensWhenComplete_ = value; bitField0_ |= 0x00000020; onChanged(); return this; } /** * optional bool cancel_tokens_when_complete = 6 [default = true]; * @return This builder for chaining. */ public Builder clearCancelTokensWhenComplete() { bitField0_ = (bitField0_ & ~0x00000020); cancelTokensWhenComplete_ = true; onChanged(); return this; } private boolean unmanagedAm_ ; /** * optional bool unmanaged_am = 7 [default = false]; * @return Whether the unmanagedAm field is set. */ @java.lang.Override public boolean hasUnmanagedAm() { return ((bitField0_ & 0x00000040) != 0); } /** * optional bool unmanaged_am = 7 [default = false]; * @return The unmanagedAm. */ @java.lang.Override public boolean getUnmanagedAm() { return unmanagedAm_; } /** * optional bool unmanaged_am = 7 [default = false]; * @param value The unmanagedAm to set. * @return This builder for chaining. */ public Builder setUnmanagedAm(boolean value) { unmanagedAm_ = value; bitField0_ |= 0x00000040; onChanged(); return this; } /** * optional bool unmanaged_am = 7 [default = false]; * @return This builder for chaining. */ public Builder clearUnmanagedAm() { bitField0_ = (bitField0_ & ~0x00000040); unmanagedAm_ = false; onChanged(); return this; } private int maxAppAttempts_ ; /** * optional int32 maxAppAttempts = 8 [default = 0]; * @return Whether the maxAppAttempts field is set. */ @java.lang.Override public boolean hasMaxAppAttempts() { return ((bitField0_ & 0x00000080) != 0); } /** * optional int32 maxAppAttempts = 8 [default = 0]; * @return The maxAppAttempts. */ @java.lang.Override public int getMaxAppAttempts() { return maxAppAttempts_; }
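/*
 * Presence sketch for the optional scalars above: each field keeps a has-bit in
 * bitField0_ instead of relying on a sentinel value, so "explicitly set to the
 * default" is distinguishable from "never set". With a hypothetical Builder b:
 *
 *   b.hasMaxAppAttempts();     // false; getMaxAppAttempts() returns the default 0
 *   b.setMaxAppAttempts(0);    // sets bit 0x00000080 even though the value is the default
 *   b.hasMaxAppAttempts();     // true
 *   b.clearMaxAppAttempts();   // clears the bit and restores the default
 */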
/** * optional int32 maxAppAttempts = 8 [default = 0]; * @param value The maxAppAttempts to set. * @return This builder for chaining. */ public Builder setMaxAppAttempts(int value) { maxAppAttempts_ = value; bitField0_ |= 0x00000080; onChanged(); return this; } /** * optional int32 maxAppAttempts = 8 [default = 0]; * @return This builder for chaining. */ public Builder clearMaxAppAttempts() { bitField0_ = (bitField0_ & ~0x00000080); maxAppAttempts_ = 0; onChanged(); return this; } private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourceBuilder_; /** * optional .hadoop.yarn.ResourceProto resource = 9; * @return Whether the resource field is set. */ public boolean hasResource() { return ((bitField0_ & 0x00000100) != 0); } /** * optional .hadoop.yarn.ResourceProto resource = 9; * @return The resource. */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() { if (resourceBuilder_ == null) { return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_; } else { return resourceBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceProto resource = 9; */ public Builder setResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (resourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } resource_ = value; } else { resourceBuilder_.setMessage(value); } bitField0_ |= 0x00000100; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resource = 9; */ public Builder setResource( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) { if (resourceBuilder_ == null) { resource_ = builderForValue.build(); } else { resourceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000100; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resource = 9; */ public Builder mergeResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (resourceBuilder_ == null) { if (((bitField0_ & 0x00000100) != 0) && resource_ != null && resource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) { getResourceBuilder().mergeFrom(value); } else { resource_ = value; } } else { resourceBuilder_.mergeFrom(value); } bitField0_ |= 0x00000100; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resource = 9; */ public Builder clearResource() { bitField0_ = (bitField0_ & ~0x00000100); resource_ = null; if (resourceBuilder_ != null) { resourceBuilder_.dispose(); resourceBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto resource = 9; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourceBuilder() { bitField0_ |= 0x00000100; onChanged(); return getResourceFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceProto resource = 9; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() { if (resourceBuilder_ != null) { return resourceBuilder_.getMessageOrBuilder(); } else { return resource_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_; } } /** * optional .hadoop.yarn.ResourceProto resource = 9; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getResourceFieldBuilder() { if (resourceBuilder_ == null) { resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>( getResource(), getParentForChildren(), isClean()); resource_ = null; } return resourceBuilder_; } private java.lang.Object applicationType_ = "YARN"; /** * optional string applicationType = 10 [default = "YARN"]; * @return Whether the applicationType field is set. */ public boolean hasApplicationType() { return ((bitField0_ & 0x00000200) != 0); } /** * optional string applicationType = 10 [default = "YARN"]; * @return The applicationType. */ public java.lang.String getApplicationType() { java.lang.Object ref = applicationType_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { applicationType_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string applicationType = 10 [default = "YARN"]; * @return The bytes for applicationType. */ public org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationTypeBytes() { java.lang.Object ref = applicationType_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); applicationType_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string applicationType = 10 [default = "YARN"]; * @param value The applicationType to set. * @return This builder for chaining. */ public Builder setApplicationType( java.lang.String value) { if (value == null) { throw new NullPointerException(); } applicationType_ = value; bitField0_ |= 0x00000200; onChanged(); return this; } /** * optional string applicationType = 10 [default = "YARN"]; * @return This builder for chaining. */ public Builder clearApplicationType() { applicationType_ = getDefaultInstance().getApplicationType(); bitField0_ = (bitField0_ & ~0x00000200); onChanged(); return this; } /** * optional string applicationType = 10 [default = "YARN"]; * @param value The bytes for applicationType to set. * @return This builder for chaining. */ public Builder setApplicationTypeBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } applicationType_ = value; bitField0_ |= 0x00000200; onChanged(); return this; }
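/*
 * Note on applicationType (field 10): the proto default is "YARN", so
 * getApplicationType() returns "YARN" until a value is set, and
 * clearApplicationType() falls back to it through getDefaultInstance().
 * Sketch with a hypothetical Builder b and an illustrative value:
 *
 *   b.setApplicationType("MAPREDUCE");   // sets bit 0x00000200
 *   b.clearApplicationType();            // getApplicationType() == "YARN" again
 */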
private boolean keepContainersAcrossApplicationAttempts_ ; /** * optional bool keep_containers_across_application_attempts = 11 [default = false]; * @return Whether the keepContainersAcrossApplicationAttempts field is set. */ @java.lang.Override public boolean hasKeepContainersAcrossApplicationAttempts() { return ((bitField0_ & 0x00000400) != 0); } /** * optional bool keep_containers_across_application_attempts = 11 [default = false]; * @return The keepContainersAcrossApplicationAttempts. */ @java.lang.Override public boolean getKeepContainersAcrossApplicationAttempts() { return keepContainersAcrossApplicationAttempts_; } /** * optional bool keep_containers_across_application_attempts = 11 [default = false]; * @param value The keepContainersAcrossApplicationAttempts to set. * @return This builder for chaining. */ public Builder setKeepContainersAcrossApplicationAttempts(boolean value) { keepContainersAcrossApplicationAttempts_ = value; bitField0_ |= 0x00000400; onChanged(); return this; } /** * optional bool keep_containers_across_application_attempts = 11 [default = false]; * @return This builder for chaining. */ public Builder clearKeepContainersAcrossApplicationAttempts() { bitField0_ = (bitField0_ & ~0x00000400); keepContainersAcrossApplicationAttempts_ = false; onChanged(); return this; } private org.apache.hadoop.thirdparty.protobuf.LazyStringList applicationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; private void ensureApplicationTagsIsMutable() { if (!((bitField0_ & 0x00000800) != 0)) { applicationTags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(applicationTags_); bitField0_ |= 0x00000800; } } /** * repeated string applicationTags = 12; * @return A list containing the applicationTags. */ public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getApplicationTagsList() { return applicationTags_.getUnmodifiableView(); } /** * repeated string applicationTags = 12; * @return The count of applicationTags. */ public int getApplicationTagsCount() { return applicationTags_.size(); } /** * repeated string applicationTags = 12; * @param index The index of the element to return. * @return The applicationTags at the given index. */ public java.lang.String getApplicationTags(int index) { return applicationTags_.get(index); } /** * repeated string applicationTags = 12; * @param index The index of the value to return. * @return The bytes of the applicationTags at the given index. */ public org.apache.hadoop.thirdparty.protobuf.ByteString getApplicationTagsBytes(int index) { return applicationTags_.getByteString(index); } /** * repeated string applicationTags = 12; * @param index The index to set the value at. * @param value The applicationTags to set. * @return This builder for chaining. */ public Builder setApplicationTags( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureApplicationTagsIsMutable(); applicationTags_.set(index, value); onChanged(); return this; } /** * repeated string applicationTags = 12; * @param value The applicationTags to add. * @return This builder for chaining. */ public Builder addApplicationTags( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureApplicationTagsIsMutable(); applicationTags_.add(value); onChanged(); return this; }
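/*
 * Repeated-string sketch for applicationTags (field 12): the tags live in a
 * LazyStringArrayList that is copied on first mutation (ensureApplicationTagsIsMutable),
 * so an unmodified Builder can share the parent message's list. Illustrative use,
 * with b a hypothetical Builder; addAllApplicationTags below bulk-adds an Iterable:
 *
 *   b.addApplicationTags("ad-hoc");
 *   b.addApplicationTags("nightly");
 *   b.getApplicationTagsCount();    // 2
 *   b.getApplicationTagsList();     // unmodifiable view of the tags
 */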
/** * repeated string applicationTags = 12; * @param values The applicationTags to add. * @return This builder for chaining. */ public Builder addAllApplicationTags( java.lang.Iterable<java.lang.String> values) { ensureApplicationTagsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, applicationTags_); onChanged(); return this; } /** * repeated string applicationTags = 12; * @return This builder for chaining. */ public Builder clearApplicationTags() { applicationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000800); onChanged(); return this; } /** * repeated string applicationTags = 12; * @param value The bytes of the applicationTags to add. * @return This builder for chaining. */ public Builder addApplicationTagsBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureApplicationTagsIsMutable(); applicationTags_.add(value); onChanged(); return this; } private long attemptFailuresValidityInterval_ = -1L; /** * optional int64 attempt_failures_validity_interval = 13 [default = -1]; * @return Whether the attemptFailuresValidityInterval field is set. */ @java.lang.Override public boolean hasAttemptFailuresValidityInterval() { return ((bitField0_ & 0x00001000) != 0); } /** * optional int64 attempt_failures_validity_interval = 13 [default = -1]; * @return The attemptFailuresValidityInterval. */ @java.lang.Override public long getAttemptFailuresValidityInterval() { return attemptFailuresValidityInterval_; } /** * optional int64 attempt_failures_validity_interval = 13 [default = -1]; * @param value The attemptFailuresValidityInterval to set. * @return This builder for chaining. */ public Builder setAttemptFailuresValidityInterval(long value) { attemptFailuresValidityInterval_ = value; bitField0_ |= 0x00001000; onChanged(); return this; } /** * optional int64 attempt_failures_validity_interval = 13 [default = -1]; * @return This builder for chaining. */ public Builder clearAttemptFailuresValidityInterval() { bitField0_ = (bitField0_ & ~0x00001000); attemptFailuresValidityInterval_ = -1L; onChanged(); return this; } private org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto logAggregationContext_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder> logAggregationContextBuilder_; /** * optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14; * @return Whether the logAggregationContext field is set. */ public boolean hasLogAggregationContext() { return ((bitField0_ & 0x00002000) != 0); } /** * optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14; * @return The logAggregationContext. */ public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getLogAggregationContext() { if (logAggregationContextBuilder_ == null) { return logAggregationContext_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance() : logAggregationContext_; } else { return logAggregationContextBuilder_.getMessage(); } } /** * optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14; */ public Builder setLogAggregationContext(org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto value) { if (logAggregationContextBuilder_ == null) { if (value == null) { throw new NullPointerException(); } logAggregationContext_ = value; } else { logAggregationContextBuilder_.setMessage(value); } bitField0_ |= 0x00002000; onChanged(); return this; } /** * optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14; */ public Builder setLogAggregationContext( org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder builderForValue) { if (logAggregationContextBuilder_ == null) { logAggregationContext_ = builderForValue.build(); } else { logAggregationContextBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00002000; onChanged(); return this; } /** * optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14; */ public Builder mergeLogAggregationContext(org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto value) { if (logAggregationContextBuilder_ == null) { if (((bitField0_ & 0x00002000) != 0) && logAggregationContext_ != null && logAggregationContext_ != org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance()) { getLogAggregationContextBuilder().mergeFrom(value); } else { logAggregationContext_ = value; } } else { logAggregationContextBuilder_.mergeFrom(value); } bitField0_ |= 0x00002000; onChanged(); return this; } /** * optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14; */ public Builder clearLogAggregationContext() { bitField0_ = (bitField0_ & ~0x00002000); logAggregationContext_ = null; if (logAggregationContextBuilder_ != null) { logAggregationContextBuilder_.dispose(); logAggregationContextBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14; */ public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder getLogAggregationContextBuilder() { bitField0_ |= 0x00002000; onChanged(); return getLogAggregationContextFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14; */ public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder getLogAggregationContextOrBuilder() { if (logAggregationContextBuilder_ != null) { return logAggregationContextBuilder_.getMessageOrBuilder(); } else { return logAggregationContext_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance() : logAggregationContext_; } } /** * optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder> getLogAggregationContextFieldBuilder() { if (logAggregationContextBuilder_ == null) { logAggregationContextBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder>( getLogAggregationContext(), getParentForChildren(), isClean()); logAggregationContext_ = null; } return logAggregationContextBuilder_; } private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_; private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> reservationIdBuilder_; /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 15; * @return Whether the reservationId field is set. */ public boolean hasReservationId() { return ((bitField0_ & 0x00004000) != 0); }
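/*
 * reservation_id (field 15) follows the same single-message pattern. A minimal
 * sketch using only accessors visible in this file (ReservationIdProto's own
 * fields are outside this excerpt), with b a hypothetical Builder:
 *
 *   if (!b.hasReservationId()) {
 *     b.setReservationId(
 *         org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance());
 *   }
 */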
/** * optional .hadoop.yarn.ReservationIdProto reservation_id = 15; * @return The reservationId. */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() { if (reservationIdBuilder_ == null) { return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } else { return reservationIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 15; */ public Builder setReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) { if (reservationIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } reservationId_ = value; } else { reservationIdBuilder_.setMessage(value); } bitField0_ |= 0x00004000; onChanged(); return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 15; */ public Builder setReservationId( org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder builderForValue) { if (reservationIdBuilder_ == null) { reservationId_ = builderForValue.build(); } else { reservationIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00004000; onChanged(); return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 15; */ public Builder mergeReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) { if (reservationIdBuilder_ == null) { if (((bitField0_ & 0x00004000) != 0) && reservationId_ != null && reservationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance()) { getReservationIdBuilder().mergeFrom(value); } else { reservationId_ = value; } } else { reservationIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00004000; onChanged(); return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 15; */ public Builder clearReservationId() { bitField0_ = (bitField0_ & ~0x00004000); reservationId_ = null; if (reservationIdBuilder_ != null) { reservationIdBuilder_.dispose(); reservationIdBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 15; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder getReservationIdBuilder() { bitField0_ |= 0x00004000; onChanged(); return getReservationIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 15; */ public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() { if (reservationIdBuilder_ != null) { return reservationIdBuilder_.getMessageOrBuilder(); } else { return reservationId_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 15; */ private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> getReservationIdFieldBuilder() { if (reservationIdBuilder_ == null) { reservationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>( getReservationId(), getParentForChildren(), isClean()); reservationId_ = null; } return reservationIdBuilder_; } private java.lang.Object nodeLabelExpression_ = ""; /** * optional string node_label_expression = 16; * @return Whether the nodeLabelExpression field is set. */ public boolean hasNodeLabelExpression() { return ((bitField0_ & 0x00008000) != 0); } /** * optional string node_label_expression = 16; * @return The nodeLabelExpression. */ public java.lang.String getNodeLabelExpression() { java.lang.Object ref = nodeLabelExpression_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { nodeLabelExpression_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string node_label_expression = 16; * @return The bytes for nodeLabelExpression. */ public org.apache.hadoop.thirdparty.protobuf.ByteString getNodeLabelExpressionBytes() { java.lang.Object ref = nodeLabelExpression_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); nodeLabelExpression_ = b; return b; } else { return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string node_label_expression = 16; * @param value The nodeLabelExpression to set. * @return This builder for chaining. */ public Builder setNodeLabelExpression( java.lang.String value) { if (value == null) { throw new NullPointerException(); } nodeLabelExpression_ = value; bitField0_ |= 0x00008000; onChanged(); return this; } /** * optional string node_label_expression = 16; * @return This builder for chaining. */ public Builder clearNodeLabelExpression() { nodeLabelExpression_ = getDefaultInstance().getNodeLabelExpression(); bitField0_ = (bitField0_ & ~0x00008000); onChanged(); return this; }
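/*
 * node_label_expression (field 16) has no [default = ...] clause, so its default is
 * the empty string and hasNodeLabelExpression() is the only way to distinguish
 * "unset" from an explicitly empty expression. Sketch with a hypothetical Builder b
 * and an illustrative expression value:
 *
 *   b.setNodeLabelExpression("gpu");   // sets bit 0x00008000
 *   b.hasNodeLabelExpression();        // true
 */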
/** * optional string node_label_expression = 16; * @param value The bytes for nodeLabelExpression to set. * @return This builder for chaining. */ public Builder setNodeLabelExpressionBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } nodeLabelExpression_ = value; bitField0_ |= 0x00008000; onChanged(); return this; } private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto> amContainerResourceRequest_ = java.util.Collections.emptyList(); private void ensureAmContainerResourceRequestIsMutable() { if (!((bitField0_ & 0x00010000) != 0)) { amContainerResourceRequest_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto>(amContainerResourceRequest_); bitField0_ |= 0x00010000; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> amContainerResourceRequestBuilder_; /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto> getAmContainerResourceRequestList() { if (amContainerResourceRequestBuilder_ == null) { return java.util.Collections.unmodifiableList(amContainerResourceRequest_); } else { return amContainerResourceRequestBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public int getAmContainerResourceRequestCount() { if (amContainerResourceRequestBuilder_ == null) { return amContainerResourceRequest_.size(); } else { return amContainerResourceRequestBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getAmContainerResourceRequest(int index) { if (amContainerResourceRequestBuilder_ == null) { return amContainerResourceRequest_.get(index); } else { return amContainerResourceRequestBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public Builder setAmContainerResourceRequest( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto value) { if (amContainerResourceRequestBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAmContainerResourceRequestIsMutable(); amContainerResourceRequest_.set(index, value); onChanged(); } else { amContainerResourceRequestBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public Builder setAmContainerResourceRequest( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder builderForValue) { if (amContainerResourceRequestBuilder_ == null) { ensureAmContainerResourceRequestIsMutable(); amContainerResourceRequest_.set(index, builderForValue.build()); onChanged(); } else { amContainerResourceRequestBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public Builder addAmContainerResourceRequest(org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto value) { if (amContainerResourceRequestBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAmContainerResourceRequestIsMutable(); amContainerResourceRequest_.add(value); onChanged(); } else { amContainerResourceRequestBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public 
Builder addAmContainerResourceRequest( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto value) { if (amContainerResourceRequestBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAmContainerResourceRequestIsMutable(); amContainerResourceRequest_.add(index, value); onChanged(); } else { amContainerResourceRequestBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public Builder addAmContainerResourceRequest( org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder builderForValue) { if (amContainerResourceRequestBuilder_ == null) { ensureAmContainerResourceRequestIsMutable(); amContainerResourceRequest_.add(builderForValue.build()); onChanged(); } else { amContainerResourceRequestBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public Builder addAmContainerResourceRequest( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder builderForValue) { if (amContainerResourceRequestBuilder_ == null) { ensureAmContainerResourceRequestIsMutable(); amContainerResourceRequest_.add(index, builderForValue.build()); onChanged(); } else { amContainerResourceRequestBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public Builder addAllAmContainerResourceRequest( java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto> values) { if (amContainerResourceRequestBuilder_ == null) { ensureAmContainerResourceRequestIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, amContainerResourceRequest_); onChanged(); } else { amContainerResourceRequestBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public Builder clearAmContainerResourceRequest() { if (amContainerResourceRequestBuilder_ == null) { amContainerResourceRequest_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00010000); onChanged(); } else { amContainerResourceRequestBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public Builder removeAmContainerResourceRequest(int index) { if (amContainerResourceRequestBuilder_ == null) { ensureAmContainerResourceRequestIsMutable(); amContainerResourceRequest_.remove(index); onChanged(); } else { amContainerResourceRequestBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder getAmContainerResourceRequestBuilder( int index) { return getAmContainerResourceRequestFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getAmContainerResourceRequestOrBuilder( int index) { if (amContainerResourceRequestBuilder_ == null) { return amContainerResourceRequest_.get(index); } else { return amContainerResourceRequestBuilder_.getMessageOrBuilder(index); } }
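/*
 * Repeated-message sketch for am_container_resource_request (field 17): as with the
 * singular message fields, the element list is used directly until a
 * RepeatedFieldBuilderV3 is created, after which all reads and writes go through it.
 * Illustrative use with a hypothetical Builder b, relying only on accessors in this file:
 *
 *   b.addAmContainerResourceRequestBuilder();   // appends a default element, returns its Builder
 *   b.getAmContainerResourceRequestCount();     // 1
 *   b.removeAmContainerResourceRequest(0);      // back to empty
 */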
/** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> getAmContainerResourceRequestOrBuilderList() { if (amContainerResourceRequestBuilder_ != null) { return amContainerResourceRequestBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(amContainerResourceRequest_); } } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder addAmContainerResourceRequestBuilder() { return getAmContainerResourceRequestFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder addAmContainerResourceRequestBuilder( int index) { return getAmContainerResourceRequestFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17; */ public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder> getAmContainerResourceRequestBuilderList() { return getAmContainerResourceRequestFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> getAmContainerResourceRequestFieldBuilder() { if (amContainerResourceRequestBuilder_ == null) { amContainerResourceRequestBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder>( amContainerResourceRequest_, ((bitField0_ & 0x00010000) != 0), getParentForChildren(), isClean()); amContainerResourceRequest_ = null; } return amContainerResourceRequestBuilder_; } private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto> applicationTimeouts_ = java.util.Collections.emptyList(); private void ensureApplicationTimeoutsIsMutable() { if (!((bitField0_ & 0x00020000) != 0)) { applicationTimeouts_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto>(applicationTimeouts_); bitField0_ |= 0x00020000; } } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder> applicationTimeoutsBuilder_; /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto> getApplicationTimeoutsList() { if (applicationTimeoutsBuilder_ == null) { return java.util.Collections.unmodifiableList(applicationTimeouts_); } else { return applicationTimeoutsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public int getApplicationTimeoutsCount() { if (applicationTimeoutsBuilder_ == null) { return applicationTimeouts_.size(); } else { return applicationTimeoutsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto getApplicationTimeouts(int index) { if (applicationTimeoutsBuilder_ == null) { 
return applicationTimeouts_.get(index); } else { return applicationTimeoutsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public Builder setApplicationTimeouts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto value) { if (applicationTimeoutsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.set(index, value); onChanged(); } else { applicationTimeoutsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public Builder setApplicationTimeouts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder builderForValue) { if (applicationTimeoutsBuilder_ == null) { ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.set(index, builderForValue.build()); onChanged(); } else { applicationTimeoutsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public Builder addApplicationTimeouts(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto value) { if (applicationTimeoutsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.add(value); onChanged(); } else { applicationTimeoutsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public Builder addApplicationTimeouts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto value) { if (applicationTimeoutsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.add(index, value); onChanged(); } else { applicationTimeoutsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public Builder addApplicationTimeouts( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder builderForValue) { if (applicationTimeoutsBuilder_ == null) { ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.add(builderForValue.build()); onChanged(); } else { applicationTimeoutsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public Builder addApplicationTimeouts( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder builderForValue) { if (applicationTimeoutsBuilder_ == null) { ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.add(index, builderForValue.build()); onChanged(); } else { applicationTimeoutsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public Builder addAllApplicationTimeouts( java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto> values) { if (applicationTimeoutsBuilder_ == null) { ensureApplicationTimeoutsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, applicationTimeouts_); onChanged(); } else { applicationTimeoutsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public Builder clearApplicationTimeouts() { if 
(applicationTimeoutsBuilder_ == null) { applicationTimeouts_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00020000); onChanged(); } else { applicationTimeoutsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public Builder removeApplicationTimeouts(int index) { if (applicationTimeoutsBuilder_ == null) { ensureApplicationTimeoutsIsMutable(); applicationTimeouts_.remove(index); onChanged(); } else { applicationTimeoutsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder getApplicationTimeoutsBuilder( int index) { return getApplicationTimeoutsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder( int index) { if (applicationTimeoutsBuilder_ == null) { return applicationTimeouts_.get(index); } else { return applicationTimeoutsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder> getApplicationTimeoutsOrBuilderList() { if (applicationTimeoutsBuilder_ != null) { return applicationTimeoutsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(applicationTimeouts_); } } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder addApplicationTimeoutsBuilder() { return getApplicationTimeoutsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder addApplicationTimeoutsBuilder( int index) { return getApplicationTimeoutsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18; */ public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder> getApplicationTimeoutsBuilderList() { return getApplicationTimeoutsFieldBuilder().getBuilderList(); } private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder> getApplicationTimeoutsFieldBuilder() { if (applicationTimeoutsBuilder_ == null) { applicationTimeoutsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder>( applicationTimeouts_, ((bitField0_ & 0x00020000) != 0), getParentForChildren(), isClean()); applicationTimeouts_ = null; } return applicationTimeoutsBuilder_; } private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> applicationSchedulingProperties_ = java.util.Collections.emptyList();
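/*
 * application_scheduling_properties (field 19) models a string-to-string map as a
 * repeated StringStringMapProto, one key/value entry per element; the accessors
 * that follow mirror the other repeated-message fields. Sketch with a hypothetical
 * Builder b; StringStringMapProto.newBuilder() and its setKey/setValue accessors are
 * assumed from the usual generated-message pattern, as they are outside this excerpt:
 *
 *   b.addApplicationSchedulingProperties(
 *       org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.newBuilder()
 *           .setKey("scheduler.hint").setValue("batch")
 *           .build());
 */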
ensureApplicationSchedulingPropertiesIsMutable() { if (!((bitField0_ & 0x00040000) != 0)) { applicationSchedulingProperties_ = new java.util.ArrayList(applicationSchedulingProperties_); bitField0_ |= 0x00040000; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> applicationSchedulingPropertiesBuilder_; /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ public java.util.List getApplicationSchedulingPropertiesList() { if (applicationSchedulingPropertiesBuilder_ == null) { return java.util.Collections.unmodifiableList(applicationSchedulingProperties_); } else { return applicationSchedulingPropertiesBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ public int getApplicationSchedulingPropertiesCount() { if (applicationSchedulingPropertiesBuilder_ == null) { return applicationSchedulingProperties_.size(); } else { return applicationSchedulingPropertiesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getApplicationSchedulingProperties(int index) { if (applicationSchedulingPropertiesBuilder_ == null) { return applicationSchedulingProperties_.get(index); } else { return applicationSchedulingPropertiesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ public Builder setApplicationSchedulingProperties( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) { if (applicationSchedulingPropertiesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationSchedulingPropertiesIsMutable(); applicationSchedulingProperties_.set(index, value); onChanged(); } else { applicationSchedulingPropertiesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ public Builder setApplicationSchedulingProperties( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) { if (applicationSchedulingPropertiesBuilder_ == null) { ensureApplicationSchedulingPropertiesIsMutable(); applicationSchedulingProperties_.set(index, builderForValue.build()); onChanged(); } else { applicationSchedulingPropertiesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ public Builder addApplicationSchedulingProperties(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) { if (applicationSchedulingPropertiesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationSchedulingPropertiesIsMutable(); applicationSchedulingProperties_.add(value); onChanged(); } else { applicationSchedulingPropertiesBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ public Builder addApplicationSchedulingProperties( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) { if 
(applicationSchedulingPropertiesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationSchedulingPropertiesIsMutable(); applicationSchedulingProperties_.add(index, value); onChanged(); } else { applicationSchedulingPropertiesBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ public Builder addApplicationSchedulingProperties( org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) { if (applicationSchedulingPropertiesBuilder_ == null) { ensureApplicationSchedulingPropertiesIsMutable(); applicationSchedulingProperties_.add(builderForValue.build()); onChanged(); } else { applicationSchedulingPropertiesBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ public Builder addApplicationSchedulingProperties( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) { if (applicationSchedulingPropertiesBuilder_ == null) { ensureApplicationSchedulingPropertiesIsMutable(); applicationSchedulingProperties_.add(index, builderForValue.build()); onChanged(); } else { applicationSchedulingPropertiesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ public Builder addAllApplicationSchedulingProperties( java.lang.Iterable values) { if (applicationSchedulingPropertiesBuilder_ == null) { ensureApplicationSchedulingPropertiesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, applicationSchedulingProperties_); onChanged(); } else { applicationSchedulingPropertiesBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ public Builder clearApplicationSchedulingProperties() { if (applicationSchedulingPropertiesBuilder_ == null) { applicationSchedulingProperties_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00040000); onChanged(); } else { applicationSchedulingPropertiesBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ public Builder removeApplicationSchedulingProperties(int index) { if (applicationSchedulingPropertiesBuilder_ == null) { ensureApplicationSchedulingPropertiesIsMutable(); applicationSchedulingProperties_.remove(index); onChanged(); } else { applicationSchedulingPropertiesBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder getApplicationSchedulingPropertiesBuilder( int index) { return getApplicationSchedulingPropertiesFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getApplicationSchedulingPropertiesOrBuilder( int index) { if (applicationSchedulingPropertiesBuilder_ == null) { return applicationSchedulingProperties_.get(index); } else { return applicationSchedulingPropertiesBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19; */ public 
java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
    getApplicationSchedulingPropertiesOrBuilderList() {
  if (applicationSchedulingPropertiesBuilder_ != null) {
    return applicationSchedulingPropertiesBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(applicationSchedulingProperties_);
  }
}
/**
 * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;
 */
public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addApplicationSchedulingPropertiesBuilder() {
  return getApplicationSchedulingPropertiesFieldBuilder().addBuilder(
      org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
}
/**
 * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;
 */
public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addApplicationSchedulingPropertiesBuilder(
    int index) {
  return getApplicationSchedulingPropertiesFieldBuilder().addBuilder(
      index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
}
/**
 * repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;
 */
public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder>
    getApplicationSchedulingPropertiesBuilderList() {
  return getApplicationSchedulingPropertiesFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
    getApplicationSchedulingPropertiesFieldBuilder() {
  if (applicationSchedulingPropertiesBuilder_ == null) {
    applicationSchedulingPropertiesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>(
            applicationSchedulingProperties_,
            ((bitField0_ & 0x00040000) != 0),
            getParentForChildren(),
            isClean());
    applicationSchedulingProperties_ = null;
  }
  return applicationSchedulingPropertiesBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
    final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationSubmissionContextProto)
}

// @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationSubmissionContextProto)
private static final org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto();
}

public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationSubmissionContextProto>
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationSubmissionContextProto>() {
  @java.lang.Override
  public ApplicationSubmissionContextProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};

public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationSubmissionContextProto> parser() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationSubmissionContextProto> getParserForType() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}

public interface ApplicationTimeoutMapProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationTimeoutMapProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /**
   * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;
   * @return Whether the applicationTimeoutType field is set.
   */
  boolean hasApplicationTimeoutType();
  /**
   * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;
   * @return The applicationTimeoutType.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType();

  /**
   * optional int64 timeout = 2;
   * @return Whether the timeout field is set.
   */
  boolean hasTimeout();
  /**
   * optional int64 timeout = 2;
   * @return The timeout.
   */
  long getTimeout();
}
/**
 * Protobuf type {@code hadoop.yarn.ApplicationTimeoutMapProto}
 */
public static final class ApplicationTimeoutMapProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationTimeoutMapProto)
    ApplicationTimeoutMapProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use ApplicationTimeoutMapProto.newBuilder() to construct.
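// Illustrative sketch (editor's addition, not part of the generated file): a
// typical round trip with this message using the generated builder API shown
// below; the field values are arbitrary example inputs, not YARN defaults.
//
//   YarnProtos.ApplicationTimeoutMapProto timeout =
//       YarnProtos.ApplicationTimeoutMapProto.newBuilder()
//           .setApplicationTimeoutType(
//               YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME)
//           .setTimeout(3600L)   // optional int64 timeout = 2
//           .build();
//   byte[] wire = timeout.toByteArray();
//   YarnProtos.ApplicationTimeoutMapProto parsed =
//       YarnProtos.ApplicationTimeoutMapProto.parseFrom(wire);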
private ApplicationTimeoutMapProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
private ApplicationTimeoutMapProto() {
  applicationTimeoutType_ = 1;
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
    UnusedPrivateParameter unused) {
  return new ApplicationTimeoutMapProto();
}

@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() {
  return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutMapProto_descriptor;
}

@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() {
  return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutMapProto_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder.class);
}

private int bitField0_;
public static final int APPLICATION_TIMEOUT_TYPE_FIELD_NUMBER = 1;
private int applicationTimeoutType_ = 1;
/**
 * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;
 * @return Whether the applicationTimeoutType field is set.
 */
@java.lang.Override public boolean hasApplicationTimeoutType() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;
 * @return The applicationTimeoutType.
 */
@java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType() {
  org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(applicationTimeoutType_);
  return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME : result;
}

public static final int TIMEOUT_FIELD_NUMBER = 2;
private long timeout_ = 0L;
/**
 * optional int64 timeout = 2;
 * @return Whether the timeout field is set.
 */
@java.lang.Override
public boolean hasTimeout() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 * optional int64 timeout = 2;
 * @return The timeout.
 */
@java.lang.Override
public long getTimeout() {
  return timeout_;
}

private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}

@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeEnum(1, applicationTimeoutType_);
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeInt64(2, timeout_);
  }
  getUnknownFields().writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
      .computeEnumSize(1, applicationTimeoutType_);
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
      .computeInt64Size(2, timeout_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
   return true;
  }
  if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto) obj;

  if (hasApplicationTimeoutType() != other.hasApplicationTimeoutType()) return false;
  if (hasApplicationTimeoutType()) {
    if (applicationTimeoutType_ != other.applicationTimeoutType_) return false;
  }
  if (hasTimeout() != other.hasTimeout()) return false;
  if (hasTimeout()) {
    if (getTimeout() != other.getTimeout()) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasApplicationTimeoutType()) {
    hash = (37 * hash) + APPLICATION_TIMEOUT_TYPE_FIELD_NUMBER;
    hash = (53 * hash) + applicationTimeoutType_;
  }
  if (hasTimeout()) {
    hash = (37 * hash) + TIMEOUT_FIELD_NUMBER;
    hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
        getTimeout());
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}

public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom(
    java.nio.ByteBuffer data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom(
    java.nio.ByteBuffer data,
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom(
    org.apache.hadoop.thirdparty.protobuf.ByteString data)
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto
parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ApplicationTimeoutMapProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationTimeoutMapProto) org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; applicationTimeoutType_ = 1; timeout_ = 0L; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutMapProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.applicationTimeoutType_ = applicationTimeoutType_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.timeout_ = timeout_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.getDefaultInstance()) return this; if (other.hasApplicationTimeoutType()) { setApplicationTimeoutType(other.getApplicationTimeoutType()); } if (other.hasTimeout()) { setTimeout(other.getTimeout()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(1, tmpRaw); } else { applicationTimeoutType_ = tmpRaw; bitField0_ |= 0x00000001; } break; } // case 8 case 16: { timeout_ = input.readInt64(); bitField0_ |= 0x00000002; break; } // case 16 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int applicationTimeoutType_ = 1; /** * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1; * @return Whether the applicationTimeoutType field is set. */ @java.lang.Override public boolean hasApplicationTimeoutType() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1; * @return The applicationTimeoutType. 
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(applicationTimeoutType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME : result; } /** * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1; * @param value The applicationTimeoutType to set. * @return This builder for chaining. */ public Builder setApplicationTimeoutType(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; applicationTimeoutType_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1; * @return This builder for chaining. */ public Builder clearApplicationTimeoutType() { bitField0_ = (bitField0_ & ~0x00000001); applicationTimeoutType_ = 1; onChanged(); return this; } private long timeout_ ; /** * optional int64 timeout = 2; * @return Whether the timeout field is set. */ @java.lang.Override public boolean hasTimeout() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int64 timeout = 2; * @return The timeout. */ @java.lang.Override public long getTimeout() { return timeout_; } /** * optional int64 timeout = 2; * @param value The timeout to set. * @return This builder for chaining. */ public Builder setTimeout(long value) { timeout_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional int64 timeout = 2; * @return This builder for chaining. 
 */
public Builder clearTimeout() {
  bitField0_ = (bitField0_ & ~0x00000002);
  timeout_ = 0L;
  onChanged();
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(
    final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationTimeoutMapProto)
}

// @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationTimeoutMapProto)
private static final org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto();
}

public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationTimeoutMapProto>
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationTimeoutMapProto>() {
  @java.lang.Override
  public ApplicationTimeoutMapProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};

public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationTimeoutMapProto> parser() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationTimeoutMapProto> getParserForType() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}

public interface ApplicationUpdateTimeoutMapProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationUpdateTimeoutMapProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /**
   * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;
   * @return Whether the applicationTimeoutType field is set.
   */
  boolean hasApplicationTimeoutType();
  /**
   * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;
   * @return The applicationTimeoutType.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType();

  /**
   * optional string expire_time = 2;
   * @return Whether the expireTime field is set.
   */
  boolean hasExpireTime();
  /**
   * optional string expire_time = 2;
   * @return The expireTime.
   */
  java.lang.String getExpireTime();
  /**
   * optional string expire_time = 2;
   * @return The bytes for expireTime.
*/ org.apache.hadoop.thirdparty.protobuf.ByteString getExpireTimeBytes(); } /** * Protobuf type {@code hadoop.yarn.ApplicationUpdateTimeoutMapProto} */ public static final class ApplicationUpdateTimeoutMapProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationUpdateTimeoutMapProto) ApplicationUpdateTimeoutMapProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ApplicationUpdateTimeoutMapProto.newBuilder() to construct. private ApplicationUpdateTimeoutMapProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ApplicationUpdateTimeoutMapProto() { applicationTimeoutType_ = 1; expireTime_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ApplicationUpdateTimeoutMapProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder.class); } private int bitField0_; public static final int APPLICATION_TIMEOUT_TYPE_FIELD_NUMBER = 1; private int applicationTimeoutType_ = 1; /** * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1; * @return Whether the applicationTimeoutType field is set. */ @java.lang.Override public boolean hasApplicationTimeoutType() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1; * @return The applicationTimeoutType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(applicationTimeoutType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME : result; } public static final int EXPIRE_TIME_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object expireTime_ = ""; /** * optional string expire_time = 2; * @return Whether the expireTime field is set. */ @java.lang.Override public boolean hasExpireTime() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string expire_time = 2; * @return The expireTime. 
*/ @java.lang.Override public java.lang.String getExpireTime() { java.lang.Object ref = expireTime_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { expireTime_ = s; } return s; } } /** * optional string expire_time = 2; * @return The bytes for expireTime. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getExpireTimeBytes() { java.lang.Object ref = expireTime_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); expireTime_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeEnum(1, applicationTimeoutType_); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, expireTime_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(1, applicationTimeoutType_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, expireTime_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto) obj; if (hasApplicationTimeoutType() != other.hasApplicationTimeoutType()) return false; if (hasApplicationTimeoutType()) { if (applicationTimeoutType_ != other.applicationTimeoutType_) return false; } if (hasExpireTime() != other.hasExpireTime()) return false; if (hasExpireTime()) { if (!getExpireTime() .equals(other.getExpireTime())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApplicationTimeoutType()) { hash = (37 * hash) + APPLICATION_TIMEOUT_TYPE_FIELD_NUMBER; hash = (53 * hash) + applicationTimeoutType_; } if (hasExpireTime()) { hash = (37 * hash) + EXPIRE_TIME_FIELD_NUMBER; hash = (53 * hash) + getExpireTime().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } 
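// Illustrative sketch (editor's addition, not part of the generated file):
// the parseDelimitedFrom overloads above pair with MessageLite.writeDelimitedTo
// for reading length-prefixed messages from a stream. The streams and field
// values below are assumptions made up for the example:
//
//   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
//   YarnProtos.ApplicationUpdateTimeoutMapProto.newBuilder()
//       .setApplicationTimeoutType(
//           YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME)
//       .setExpireTime("2024-01-01T00:00:00.000+0000")
//       .build()
//       .writeDelimitedTo(out);
//   YarnProtos.ApplicationUpdateTimeoutMapProto read =
//       YarnProtos.ApplicationUpdateTimeoutMapProto.parseDelimitedFrom(
//           new java.io.ByteArrayInputStream(out.toByteArray()));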
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ApplicationUpdateTimeoutMapProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationUpdateTimeoutMapProto) org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; applicationTimeoutType_ = 1; expireTime_ = ""; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.applicationTimeoutType_ = applicationTimeoutType_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.expireTime_ = expireTime_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.getDefaultInstance()) return this; if (other.hasApplicationTimeoutType()) { setApplicationTimeoutType(other.getApplicationTimeoutType()); } if (other.hasExpireTime()) { expireTime_ = other.expireTime_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(1, tmpRaw); } else { applicationTimeoutType_ = tmpRaw; bitField0_ |= 
0x00000001; } break; } // case 8 case 18: { expireTime_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int applicationTimeoutType_ = 1; /** * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1; * @return Whether the applicationTimeoutType field is set. */ @java.lang.Override public boolean hasApplicationTimeoutType() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1; * @return The applicationTimeoutType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(applicationTimeoutType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME : result; } /** * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1; * @param value The applicationTimeoutType to set. * @return This builder for chaining. */ public Builder setApplicationTimeoutType(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; applicationTimeoutType_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1; * @return This builder for chaining. */ public Builder clearApplicationTimeoutType() { bitField0_ = (bitField0_ & ~0x00000001); applicationTimeoutType_ = 1; onChanged(); return this; } private java.lang.Object expireTime_ = ""; /** * optional string expire_time = 2; * @return Whether the expireTime field is set. */ public boolean hasExpireTime() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string expire_time = 2; * @return The expireTime. */ public java.lang.String getExpireTime() { java.lang.Object ref = expireTime_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { expireTime_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string expire_time = 2; * @return The bytes for expireTime. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getExpireTimeBytes() { java.lang.Object ref = expireTime_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); expireTime_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string expire_time = 2; * @param value The expireTime to set. * @return This builder for chaining. 
*/ public Builder setExpireTime( java.lang.String value) { if (value == null) { throw new NullPointerException(); } expireTime_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional string expire_time = 2; * @return This builder for chaining. */ public Builder clearExpireTime() { expireTime_ = getDefaultInstance().getExpireTime(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * optional string expire_time = 2; * @param value The bytes for expireTime to set. * @return This builder for chaining. */ public Builder setExpireTimeBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } expireTime_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationUpdateTimeoutMapProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationUpdateTimeoutMapProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ApplicationUpdateTimeoutMapProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface LogAggregationContextProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.LogAggregationContextProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * 
optional string include_pattern = 1 [default = ".*"]; * @return Whether the includePattern field is set. */ boolean hasIncludePattern(); /** * optional string include_pattern = 1 [default = ".*"]; * @return The includePattern. */ java.lang.String getIncludePattern(); /** * optional string include_pattern = 1 [default = ".*"]; * @return The bytes for includePattern. */ org.apache.hadoop.thirdparty.protobuf.ByteString getIncludePatternBytes(); /** * optional string exclude_pattern = 2 [default = ""]; * @return Whether the excludePattern field is set. */ boolean hasExcludePattern(); /** * optional string exclude_pattern = 2 [default = ""]; * @return The excludePattern. */ java.lang.String getExcludePattern(); /** * optional string exclude_pattern = 2 [default = ""]; * @return The bytes for excludePattern. */ org.apache.hadoop.thirdparty.protobuf.ByteString getExcludePatternBytes(); /** * optional string rolled_logs_include_pattern = 3 [default = ""]; * @return Whether the rolledLogsIncludePattern field is set. */ boolean hasRolledLogsIncludePattern(); /** * optional string rolled_logs_include_pattern = 3 [default = ""]; * @return The rolledLogsIncludePattern. */ java.lang.String getRolledLogsIncludePattern(); /** * optional string rolled_logs_include_pattern = 3 [default = ""]; * @return The bytes for rolledLogsIncludePattern. */ org.apache.hadoop.thirdparty.protobuf.ByteString getRolledLogsIncludePatternBytes(); /** * optional string rolled_logs_exclude_pattern = 4 [default = ".*"]; * @return Whether the rolledLogsExcludePattern field is set. */ boolean hasRolledLogsExcludePattern(); /** * optional string rolled_logs_exclude_pattern = 4 [default = ".*"]; * @return The rolledLogsExcludePattern. */ java.lang.String getRolledLogsExcludePattern(); /** * optional string rolled_logs_exclude_pattern = 4 [default = ".*"]; * @return The bytes for rolledLogsExcludePattern. */ org.apache.hadoop.thirdparty.protobuf.ByteString getRolledLogsExcludePatternBytes(); /** * optional string log_aggregation_policy_class_name = 5; * @return Whether the logAggregationPolicyClassName field is set. */ boolean hasLogAggregationPolicyClassName(); /** * optional string log_aggregation_policy_class_name = 5; * @return The logAggregationPolicyClassName. */ java.lang.String getLogAggregationPolicyClassName(); /** * optional string log_aggregation_policy_class_name = 5; * @return The bytes for logAggregationPolicyClassName. */ org.apache.hadoop.thirdparty.protobuf.ByteString getLogAggregationPolicyClassNameBytes(); /** * optional string log_aggregation_policy_parameters = 6; * @return Whether the logAggregationPolicyParameters field is set. */ boolean hasLogAggregationPolicyParameters(); /** * optional string log_aggregation_policy_parameters = 6; * @return The logAggregationPolicyParameters. */ java.lang.String getLogAggregationPolicyParameters(); /** * optional string log_aggregation_policy_parameters = 6; * @return The bytes for logAggregationPolicyParameters. */ org.apache.hadoop.thirdparty.protobuf.ByteString getLogAggregationPolicyParametersBytes(); } /** * Protobuf type {@code hadoop.yarn.LogAggregationContextProto} */ public static final class LogAggregationContextProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.LogAggregationContextProto) LogAggregationContextProtoOrBuilder { private static final long serialVersionUID = 0L; // Use LogAggregationContextProto.newBuilder() to construct. 
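// Illustrative sketch (not part of the generated code): building a context
// that aggregates only *.log files while skipping temp files. The regular
// expressions are hypothetical example values, not defaults from this file.
//
//   YarnProtos.LogAggregationContextProto ctx =
//       YarnProtos.LogAggregationContextProto.newBuilder()
//           .setIncludePattern(".*\\.log")   // files to aggregate
//           .setExcludePattern(".*\\.tmp")   // files to skip
//           .build();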
private LogAggregationContextProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private LogAggregationContextProto() { includePattern_ = ".*"; excludePattern_ = ""; rolledLogsIncludePattern_ = ""; rolledLogsExcludePattern_ = ".*"; logAggregationPolicyClassName_ = ""; logAggregationPolicyParameters_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new LogAggregationContextProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LogAggregationContextProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LogAggregationContextProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.class, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder.class); } private int bitField0_; public static final int INCLUDE_PATTERN_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object includePattern_ = ".*"; /** * optional string include_pattern = 1 [default = ".*"]; * @return Whether the includePattern field is set. */ @java.lang.Override public boolean hasIncludePattern() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string include_pattern = 1 [default = ".*"]; * @return The includePattern. */ @java.lang.Override public java.lang.String getIncludePattern() { java.lang.Object ref = includePattern_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { includePattern_ = s; } return s; } } /** * optional string include_pattern = 1 [default = ".*"]; * @return The bytes for includePattern. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getIncludePatternBytes() { java.lang.Object ref = includePattern_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); includePattern_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int EXCLUDE_PATTERN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object excludePattern_ = ""; /** * optional string exclude_pattern = 2 [default = ""]; * @return Whether the excludePattern field is set. */ @java.lang.Override public boolean hasExcludePattern() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string exclude_pattern = 2 [default = ""]; * @return The excludePattern. 
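* <p>Proto2 presence note (illustrative, for a message instance
* {@code proto}, a hypothetical variable): when the field is unset this
* accessor returns the declared default ({@code ""}), so use
* {@code hasExcludePattern()} to distinguish "never set" from an explicitly
* set empty pattern:
* <pre>{@code
* boolean explicitlyEmpty =
*     proto.hasExcludePattern() && proto.getExcludePattern().isEmpty();
* }</pre>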
*/ @java.lang.Override public java.lang.String getExcludePattern() { java.lang.Object ref = excludePattern_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { excludePattern_ = s; } return s; } } /** * optional string exclude_pattern = 2 [default = ""]; * @return The bytes for excludePattern. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getExcludePatternBytes() { java.lang.Object ref = excludePattern_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); excludePattern_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int ROLLED_LOGS_INCLUDE_PATTERN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object rolledLogsIncludePattern_ = ""; /** * optional string rolled_logs_include_pattern = 3 [default = ""]; * @return Whether the rolledLogsIncludePattern field is set. */ @java.lang.Override public boolean hasRolledLogsIncludePattern() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string rolled_logs_include_pattern = 3 [default = ""]; * @return The rolledLogsIncludePattern. */ @java.lang.Override public java.lang.String getRolledLogsIncludePattern() { java.lang.Object ref = rolledLogsIncludePattern_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { rolledLogsIncludePattern_ = s; } return s; } } /** * optional string rolled_logs_include_pattern = 3 [default = ""]; * @return The bytes for rolledLogsIncludePattern. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getRolledLogsIncludePatternBytes() { java.lang.Object ref = rolledLogsIncludePattern_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); rolledLogsIncludePattern_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int ROLLED_LOGS_EXCLUDE_PATTERN_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object rolledLogsExcludePattern_ = ".*"; /** * optional string rolled_logs_exclude_pattern = 4 [default = ".*"]; * @return Whether the rolledLogsExcludePattern field is set. */ @java.lang.Override public boolean hasRolledLogsExcludePattern() { return ((bitField0_ & 0x00000008) != 0); } /** * optional string rolled_logs_exclude_pattern = 4 [default = ".*"]; * @return The rolledLogsExcludePattern. 
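* <p>Wire round-trip sketch (illustrative; {@code ctx} is a hypothetical
* instance of this message): serialization and re-parsing are lossless,
* declared defaults included.
* <pre>{@code
* byte[] wire = ctx.toByteArray();
* YarnProtos.LogAggregationContextProto copy =
*     YarnProtos.LogAggregationContextProto.parseFrom(wire);
* assert copy.equals(ctx);
* }</pre>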
*/ @java.lang.Override public java.lang.String getRolledLogsExcludePattern() { java.lang.Object ref = rolledLogsExcludePattern_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { rolledLogsExcludePattern_ = s; } return s; } } /** * optional string rolled_logs_exclude_pattern = 4 [default = ".*"]; * @return The bytes for rolledLogsExcludePattern. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getRolledLogsExcludePatternBytes() { java.lang.Object ref = rolledLogsExcludePattern_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); rolledLogsExcludePattern_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int LOG_AGGREGATION_POLICY_CLASS_NAME_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object logAggregationPolicyClassName_ = ""; /** * optional string log_aggregation_policy_class_name = 5; * @return Whether the logAggregationPolicyClassName field is set. */ @java.lang.Override public boolean hasLogAggregationPolicyClassName() { return ((bitField0_ & 0x00000010) != 0); } /** * optional string log_aggregation_policy_class_name = 5; * @return The logAggregationPolicyClassName. */ @java.lang.Override public java.lang.String getLogAggregationPolicyClassName() { java.lang.Object ref = logAggregationPolicyClassName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { logAggregationPolicyClassName_ = s; } return s; } } /** * optional string log_aggregation_policy_class_name = 5; * @return The bytes for logAggregationPolicyClassName. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getLogAggregationPolicyClassNameBytes() { java.lang.Object ref = logAggregationPolicyClassName_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); logAggregationPolicyClassName_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int LOG_AGGREGATION_POLICY_PARAMETERS_FIELD_NUMBER = 6; @SuppressWarnings("serial") private volatile java.lang.Object logAggregationPolicyParameters_ = ""; /** * optional string log_aggregation_policy_parameters = 6; * @return Whether the logAggregationPolicyParameters field is set. */ @java.lang.Override public boolean hasLogAggregationPolicyParameters() { return ((bitField0_ & 0x00000020) != 0); } /** * optional string log_aggregation_policy_parameters = 6; * @return The logAggregationPolicyParameters. 
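* <p>Illustrative pairing (the class name and parameter string below are
* hypothetical, not values taken from this file): the policy class name and
* its parameters are typically set together on the builder.
* <pre>{@code
* builder.setLogAggregationPolicyClassName(
*         "com.example.SampleLogAggregationPolicy")
*     .setLogAggregationPolicyParameters("sampleRate=0.2");
* }</pre>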
*/ @java.lang.Override public java.lang.String getLogAggregationPolicyParameters() { java.lang.Object ref = logAggregationPolicyParameters_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { logAggregationPolicyParameters_ = s; } return s; } } /** * optional string log_aggregation_policy_parameters = 6; * @return The bytes for logAggregationPolicyParameters. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getLogAggregationPolicyParametersBytes() { java.lang.Object ref = logAggregationPolicyParameters_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); logAggregationPolicyParameters_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, includePattern_); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, excludePattern_); } if (((bitField0_ & 0x00000004) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, rolledLogsIncludePattern_); } if (((bitField0_ & 0x00000008) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, rolledLogsExcludePattern_); } if (((bitField0_ & 0x00000010) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, logAggregationPolicyClassName_); } if (((bitField0_ & 0x00000020) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 6, logAggregationPolicyParameters_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, includePattern_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, excludePattern_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, rolledLogsIncludePattern_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, rolledLogsExcludePattern_); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, logAggregationPolicyClassName_); } if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(6, logAggregationPolicyParameters_); } size += 
getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto) obj; if (hasIncludePattern() != other.hasIncludePattern()) return false; if (hasIncludePattern()) { if (!getIncludePattern() .equals(other.getIncludePattern())) return false; } if (hasExcludePattern() != other.hasExcludePattern()) return false; if (hasExcludePattern()) { if (!getExcludePattern() .equals(other.getExcludePattern())) return false; } if (hasRolledLogsIncludePattern() != other.hasRolledLogsIncludePattern()) return false; if (hasRolledLogsIncludePattern()) { if (!getRolledLogsIncludePattern() .equals(other.getRolledLogsIncludePattern())) return false; } if (hasRolledLogsExcludePattern() != other.hasRolledLogsExcludePattern()) return false; if (hasRolledLogsExcludePattern()) { if (!getRolledLogsExcludePattern() .equals(other.getRolledLogsExcludePattern())) return false; } if (hasLogAggregationPolicyClassName() != other.hasLogAggregationPolicyClassName()) return false; if (hasLogAggregationPolicyClassName()) { if (!getLogAggregationPolicyClassName() .equals(other.getLogAggregationPolicyClassName())) return false; } if (hasLogAggregationPolicyParameters() != other.hasLogAggregationPolicyParameters()) return false; if (hasLogAggregationPolicyParameters()) { if (!getLogAggregationPolicyParameters() .equals(other.getLogAggregationPolicyParameters())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasIncludePattern()) { hash = (37 * hash) + INCLUDE_PATTERN_FIELD_NUMBER; hash = (53 * hash) + getIncludePattern().hashCode(); } if (hasExcludePattern()) { hash = (37 * hash) + EXCLUDE_PATTERN_FIELD_NUMBER; hash = (53 * hash) + getExcludePattern().hashCode(); } if (hasRolledLogsIncludePattern()) { hash = (37 * hash) + ROLLED_LOGS_INCLUDE_PATTERN_FIELD_NUMBER; hash = (53 * hash) + getRolledLogsIncludePattern().hashCode(); } if (hasRolledLogsExcludePattern()) { hash = (37 * hash) + ROLLED_LOGS_EXCLUDE_PATTERN_FIELD_NUMBER; hash = (53 * hash) + getRolledLogsExcludePattern().hashCode(); } if (hasLogAggregationPolicyClassName()) { hash = (37 * hash) + LOG_AGGREGATION_POLICY_CLASS_NAME_FIELD_NUMBER; hash = (53 * hash) + getLogAggregationPolicyClassName().hashCode(); } if (hasLogAggregationPolicyParameters()) { hash = (37 * hash) + LOG_AGGREGATION_POLICY_PARAMETERS_FIELD_NUMBER; hash = (53 * hash) + getLogAggregationPolicyParameters().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return 
DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.LogAggregationContextProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.LogAggregationContextProto) org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LogAggregationContextProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LogAggregationContextProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.class, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; includePattern_ = ".*"; excludePattern_ = ""; rolledLogsIncludePattern_ = ""; rolledLogsExcludePattern_ = ".*"; logAggregationPolicyClassName_ = ""; logAggregationPolicyParameters_ = ""; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LogAggregationContextProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto build() { org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto result = new org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 
0)) { result.includePattern_ = includePattern_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.excludePattern_ = excludePattern_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.rolledLogsIncludePattern_ = rolledLogsIncludePattern_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.rolledLogsExcludePattern_ = rolledLogsExcludePattern_; to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.logAggregationPolicyClassName_ = logAggregationPolicyClassName_; to_bitField0_ |= 0x00000010; } if (((from_bitField0_ & 0x00000020) != 0)) { result.logAggregationPolicyParameters_ = logAggregationPolicyParameters_; to_bitField0_ |= 0x00000020; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance()) return this; if (other.hasIncludePattern()) { includePattern_ = other.includePattern_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasExcludePattern()) { excludePattern_ = other.excludePattern_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasRolledLogsIncludePattern()) { rolledLogsIncludePattern_ = other.rolledLogsIncludePattern_; bitField0_ |= 0x00000004; onChanged(); } if (other.hasRolledLogsExcludePattern()) { rolledLogsExcludePattern_ = other.rolledLogsExcludePattern_; bitField0_ |= 0x00000008; onChanged(); } if (other.hasLogAggregationPolicyClassName()) { logAggregationPolicyClassName_ = other.logAggregationPolicyClassName_; bitField0_ |= 0x00000010; onChanged(); } if (other.hasLogAggregationPolicyParameters()) { logAggregationPolicyParameters_ = other.logAggregationPolicyParameters_; bitField0_ |= 0x00000020; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, 
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { includePattern_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { excludePattern_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { rolledLogsIncludePattern_ = input.readBytes(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { rolledLogsExcludePattern_ = input.readBytes(); bitField0_ |= 0x00000008; break; } // case 34 case 42: { logAggregationPolicyClassName_ = input.readBytes(); bitField0_ |= 0x00000010; break; } // case 42 case 50: { logAggregationPolicyParameters_ = input.readBytes(); bitField0_ |= 0x00000020; break; } // case 50 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object includePattern_ = ".*"; /** * optional string include_pattern = 1 [default = ".*"]; * @return Whether the includePattern field is set. */ public boolean hasIncludePattern() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string include_pattern = 1 [default = ".*"]; * @return The includePattern. */ public java.lang.String getIncludePattern() { java.lang.Object ref = includePattern_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { includePattern_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string include_pattern = 1 [default = ".*"]; * @return The bytes for includePattern. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getIncludePatternBytes() { java.lang.Object ref = includePattern_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); includePattern_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string include_pattern = 1 [default = ".*"]; * @param value The includePattern to set. * @return This builder for chaining. */ public Builder setIncludePattern( java.lang.String value) { if (value == null) { throw new NullPointerException(); } includePattern_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional string include_pattern = 1 [default = ".*"]; * @return This builder for chaining. */ public Builder clearIncludePattern() { includePattern_ = getDefaultInstance().getIncludePattern(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * optional string include_pattern = 1 [default = ".*"]; * @param value The bytes for includePattern to set. * @return This builder for chaining. 
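* <p>Default-restoring sketch (illustrative): clearing the field returns the
* accessor to the declared default {@code ".*"} and resets its presence bit.
* <pre>{@code
* Builder b = YarnProtos.LogAggregationContextProto.newBuilder()
*     .setIncludePattern("app.*");   // hypothetical pattern
* b.clearIncludePattern();
* assert ".*".equals(b.getIncludePattern());
* assert !b.hasIncludePattern();
* }</pre>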
*/ public Builder setIncludePatternBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } includePattern_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object excludePattern_ = ""; /** * optional string exclude_pattern = 2 [default = ""]; * @return Whether the excludePattern field is set. */ public boolean hasExcludePattern() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string exclude_pattern = 2 [default = ""]; * @return The excludePattern. */ public java.lang.String getExcludePattern() { java.lang.Object ref = excludePattern_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { excludePattern_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string exclude_pattern = 2 [default = ""]; * @return The bytes for excludePattern. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getExcludePatternBytes() { java.lang.Object ref = excludePattern_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); excludePattern_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string exclude_pattern = 2 [default = ""]; * @param value The excludePattern to set. * @return This builder for chaining. */ public Builder setExcludePattern( java.lang.String value) { if (value == null) { throw new NullPointerException(); } excludePattern_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional string exclude_pattern = 2 [default = ""]; * @return This builder for chaining. */ public Builder clearExcludePattern() { excludePattern_ = getDefaultInstance().getExcludePattern(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * optional string exclude_pattern = 2 [default = ""]; * @param value The bytes for excludePattern to set. * @return This builder for chaining. */ public Builder setExcludePatternBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } excludePattern_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object rolledLogsIncludePattern_ = ""; /** * optional string rolled_logs_include_pattern = 3 [default = ""]; * @return Whether the rolledLogsIncludePattern field is set. */ public boolean hasRolledLogsIncludePattern() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string rolled_logs_include_pattern = 3 [default = ""]; * @return The rolledLogsIncludePattern. */ public java.lang.String getRolledLogsIncludePattern() { java.lang.Object ref = rolledLogsIncludePattern_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { rolledLogsIncludePattern_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string rolled_logs_include_pattern = 3 [default = ""]; * @return The bytes for rolledLogsIncludePattern. 
*/ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getRolledLogsIncludePatternBytes() { java.lang.Object ref = rolledLogsIncludePattern_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); rolledLogsIncludePattern_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string rolled_logs_include_pattern = 3 [default = ""]; * @param value The rolledLogsIncludePattern to set. * @return This builder for chaining. */ public Builder setRolledLogsIncludePattern( java.lang.String value) { if (value == null) { throw new NullPointerException(); } rolledLogsIncludePattern_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional string rolled_logs_include_pattern = 3 [default = ""]; * @return This builder for chaining. */ public Builder clearRolledLogsIncludePattern() { rolledLogsIncludePattern_ = getDefaultInstance().getRolledLogsIncludePattern(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * optional string rolled_logs_include_pattern = 3 [default = ""]; * @param value The bytes for rolledLogsIncludePattern to set. * @return This builder for chaining. */ public Builder setRolledLogsIncludePatternBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } rolledLogsIncludePattern_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object rolledLogsExcludePattern_ = ".*"; /** * optional string rolled_logs_exclude_pattern = 4 [default = ".*"]; * @return Whether the rolledLogsExcludePattern field is set. */ public boolean hasRolledLogsExcludePattern() { return ((bitField0_ & 0x00000008) != 0); } /** * optional string rolled_logs_exclude_pattern = 4 [default = ".*"]; * @return The rolledLogsExcludePattern. */ public java.lang.String getRolledLogsExcludePattern() { java.lang.Object ref = rolledLogsExcludePattern_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { rolledLogsExcludePattern_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string rolled_logs_exclude_pattern = 4 [default = ".*"]; * @return The bytes for rolledLogsExcludePattern. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getRolledLogsExcludePatternBytes() { java.lang.Object ref = rolledLogsExcludePattern_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); rolledLogsExcludePattern_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string rolled_logs_exclude_pattern = 4 [default = ".*"]; * @param value The rolledLogsExcludePattern to set. * @return This builder for chaining. */ public Builder setRolledLogsExcludePattern( java.lang.String value) { if (value == null) { throw new NullPointerException(); } rolledLogsExcludePattern_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional string rolled_logs_exclude_pattern = 4 [default = ".*"]; * @return This builder for chaining. 
*/ public Builder clearRolledLogsExcludePattern() { rolledLogsExcludePattern_ = getDefaultInstance().getRolledLogsExcludePattern(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * optional string rolled_logs_exclude_pattern = 4 [default = ".*"]; * @param value The bytes for rolledLogsExcludePattern to set. * @return This builder for chaining. */ public Builder setRolledLogsExcludePatternBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } rolledLogsExcludePattern_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } private java.lang.Object logAggregationPolicyClassName_ = ""; /** * optional string log_aggregation_policy_class_name = 5; * @return Whether the logAggregationPolicyClassName field is set. */ public boolean hasLogAggregationPolicyClassName() { return ((bitField0_ & 0x00000010) != 0); } /** * optional string log_aggregation_policy_class_name = 5; * @return The logAggregationPolicyClassName. */ public java.lang.String getLogAggregationPolicyClassName() { java.lang.Object ref = logAggregationPolicyClassName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { logAggregationPolicyClassName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string log_aggregation_policy_class_name = 5; * @return The bytes for logAggregationPolicyClassName. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getLogAggregationPolicyClassNameBytes() { java.lang.Object ref = logAggregationPolicyClassName_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); logAggregationPolicyClassName_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string log_aggregation_policy_class_name = 5; * @param value The logAggregationPolicyClassName to set. * @return This builder for chaining. */ public Builder setLogAggregationPolicyClassName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } logAggregationPolicyClassName_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional string log_aggregation_policy_class_name = 5; * @return This builder for chaining. */ public Builder clearLogAggregationPolicyClassName() { logAggregationPolicyClassName_ = getDefaultInstance().getLogAggregationPolicyClassName(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); return this; } /** * optional string log_aggregation_policy_class_name = 5; * @param value The bytes for logAggregationPolicyClassName to set. * @return This builder for chaining. */ public Builder setLogAggregationPolicyClassNameBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } logAggregationPolicyClassName_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } private java.lang.Object logAggregationPolicyParameters_ = ""; /** * optional string log_aggregation_policy_parameters = 6; * @return Whether the logAggregationPolicyParameters field is set. 
*/ public boolean hasLogAggregationPolicyParameters() { return ((bitField0_ & 0x00000020) != 0); } /** * optional string log_aggregation_policy_parameters = 6; * @return The logAggregationPolicyParameters. */ public java.lang.String getLogAggregationPolicyParameters() { java.lang.Object ref = logAggregationPolicyParameters_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { logAggregationPolicyParameters_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string log_aggregation_policy_parameters = 6; * @return The bytes for logAggregationPolicyParameters. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getLogAggregationPolicyParametersBytes() { java.lang.Object ref = logAggregationPolicyParameters_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); logAggregationPolicyParameters_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string log_aggregation_policy_parameters = 6; * @param value The logAggregationPolicyParameters to set. * @return This builder for chaining. */ public Builder setLogAggregationPolicyParameters( java.lang.String value) { if (value == null) { throw new NullPointerException(); } logAggregationPolicyParameters_ = value; bitField0_ |= 0x00000020; onChanged(); return this; } /** * optional string log_aggregation_policy_parameters = 6; * @return This builder for chaining. */ public Builder clearLogAggregationPolicyParameters() { logAggregationPolicyParameters_ = getDefaultInstance().getLogAggregationPolicyParameters(); bitField0_ = (bitField0_ & ~0x00000020); onChanged(); return this; } /** * optional string log_aggregation_policy_parameters = 6; * @param value The bytes for logAggregationPolicyParameters to set. * @return This builder for chaining. 
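* <p>Byte-level sketch (illustrative): the {@code *Bytes} setters accept a
* UTF-8 {@code ByteString} directly, bypassing the String accessors.
* <pre>{@code
* builder.setLogAggregationPolicyParametersBytes(
*     org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
*         "sampleRate=0.2"));   // hypothetical parameter string
* }</pre>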
*/ public Builder setLogAggregationPolicyParametersBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } logAggregationPolicyParameters_ = value; bitField0_ |= 0x00000020; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.LogAggregationContextProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.LogAggregationContextProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public LogAggregationContextProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ApplicationACLMapProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationACLMapProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ApplicationAccessTypeProto accessType = 1; * @return Whether the accessType field is set. */ boolean hasAccessType(); /** * optional .hadoop.yarn.ApplicationAccessTypeProto accessType = 1; * @return The accessType. */ org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto getAccessType(); /** * optional string acl = 2 [default = " "]; * @return Whether the acl field is set. */ boolean hasAcl(); /** * optional string acl = 2 [default = " "]; * @return The acl. 
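* <p>Construction sketch (illustrative; the user list is hypothetical):
* <pre>{@code
* YarnProtos.ApplicationACLMapProto aclEntry =
*     YarnProtos.ApplicationACLMapProto.newBuilder()
*         .setAccessType(
*             YarnProtos.ApplicationAccessTypeProto.APPACCESS_VIEW_APP)
*         .setAcl("alice,bob")
*         .build();
* }</pre>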
*/ java.lang.String getAcl(); /** * optional string acl = 2 [default = " "]; * @return The bytes for acl. */ org.apache.hadoop.thirdparty.protobuf.ByteString getAclBytes(); } /** * Protobuf type {@code hadoop.yarn.ApplicationACLMapProto} */ public static final class ApplicationACLMapProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationACLMapProto) ApplicationACLMapProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ApplicationACLMapProto.newBuilder() to construct. private ApplicationACLMapProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ApplicationACLMapProto() { accessType_ = 1; acl_ = " "; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ApplicationACLMapProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationACLMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationACLMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder.class); } private int bitField0_; public static final int ACCESSTYPE_FIELD_NUMBER = 1; private int accessType_ = 1; /** * optional .hadoop.yarn.ApplicationAccessTypeProto accessType = 1; * @return Whether the accessType field is set. */ @java.lang.Override public boolean hasAccessType() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationAccessTypeProto accessType = 1; * @return The accessType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto getAccessType() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto.forNumber(accessType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto.APPACCESS_VIEW_APP : result; } public static final int ACL_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object acl_ = " "; /** * optional string acl = 2 [default = " "]; * @return Whether the acl field is set. */ @java.lang.Override public boolean hasAcl() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string acl = 2 [default = " "]; * @return The acl. */ @java.lang.Override public java.lang.String getAcl() { java.lang.Object ref = acl_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { acl_ = s; } return s; } } /** * optional string acl = 2 [default = " "]; * @return The bytes for acl. 
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getAclBytes() { java.lang.Object ref = acl_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); acl_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeEnum(1, accessType_); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, acl_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(1, accessType_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, acl_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto) obj; if (hasAccessType() != other.hasAccessType()) return false; if (hasAccessType()) { if (accessType_ != other.accessType_) return false; } if (hasAcl() != other.hasAcl()) return false; if (hasAcl()) { if (!getAcl() .equals(other.getAcl())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasAccessType()) { hash = (37 * hash) + ACCESSTYPE_FIELD_NUMBER; hash = (53 * hash) + accessType_; } if (hasAcl()) { hash = (37 * hash) + ACL_FIELD_NUMBER; hash = (53 * hash) + getAcl().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ApplicationACLMapProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationACLMapProto) org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationACLMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationACLMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; accessType_ = 1; acl_ = " "; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationACLMapProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.accessType_ = accessType_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.acl_ = acl_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( 
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.getDefaultInstance()) return this; if (other.hasAccessType()) { setAccessType(other.getAccessType()); } if (other.hasAcl()) { acl_ = other.acl_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(1, tmpRaw); } else { accessType_ = tmpRaw; bitField0_ |= 0x00000001; } break; } // case 8 case 18: { acl_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int accessType_ = 1; /** * optional .hadoop.yarn.ApplicationAccessTypeProto accessType = 1; * @return Whether the accessType field is set. */ @java.lang.Override public boolean hasAccessType() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ApplicationAccessTypeProto accessType = 1; * @return The accessType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto getAccessType() { org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto.forNumber(accessType_); return result == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto.APPACCESS_VIEW_APP : result; } /** * optional .hadoop.yarn.ApplicationAccessTypeProto accessType = 1; * @param value The accessType to set. * @return This builder for chaining. */ public Builder setAccessType(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; accessType_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.ApplicationAccessTypeProto accessType = 1; * @return This builder for chaining. */ public Builder clearAccessType() { bitField0_ = (bitField0_ & ~0x00000001); accessType_ = 1; onChanged(); return this; } private java.lang.Object acl_ = " "; /** * optional string acl = 2 [default = " "]; * @return Whether the acl field is set. */ public boolean hasAcl() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string acl = 2 [default = " "]; * @return The acl. */ public java.lang.String getAcl() { java.lang.Object ref = acl_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { acl_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string acl = 2 [default = " "]; * @return The bytes for acl. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getAclBytes() { java.lang.Object ref = acl_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); acl_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string acl = 2 [default = " "]; * @param value The acl to set. * @return This builder for chaining. */ public Builder setAcl( java.lang.String value) { if (value == null) { throw new NullPointerException(); } acl_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional string acl = 2 [default = " "]; * @return This builder for chaining. */ public Builder clearAcl() { acl_ = getDefaultInstance().getAcl(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * optional string acl = 2 [default = " "]; * @param value The bytes for acl to set. * @return This builder for chaining. 
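
// A minimal sketch (an editorial illustration, not part of the generated file)
// of the optional-field semantics implemented by the builder accessors above:
// clearAcl() both clears presence bit 0x00000002 and restores the declared
// default " ", so hasAcl() is the only reliable way to distinguish "explicitly
// set to a blank ACL" from "never set". Assumes assertions are enabled (-ea).
private static void aclPresenceSketch() {
  ApplicationACLMapProto.Builder b = ApplicationACLMapProto.newBuilder();
  assert !b.hasAcl() && " ".equals(b.getAcl());  // unset: the default " " is returned
  b.setAcl("alice,bob");                         // sets presence bit 0x00000002
  assert b.hasAcl();
  b.clearAcl();                                  // bit cleared, default restored
  assert !b.hasAcl() && " ".equals(b.getAcl());
}
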
*/ public Builder setAclBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } acl_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationACLMapProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationACLMapProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ApplicationACLMapProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface YarnClusterMetricsProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.YarnClusterMetricsProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional int32 num_node_managers = 1; * @return Whether the numNodeManagers field is set. */ boolean hasNumNodeManagers(); /** * optional int32 num_node_managers = 1; * @return The numNodeManagers. */ int getNumNodeManagers(); /** * optional int32 num_decommissioned_nms = 2; * @return Whether the numDecommissionedNms field is set. */ boolean hasNumDecommissionedNms(); /** * optional int32 num_decommissioned_nms = 2; * @return The numDecommissionedNms. */ int getNumDecommissionedNms(); /** * optional int32 num_active_nms = 3; * @return Whether the numActiveNms field is set. 
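
// A minimal round-trip sketch (editorial illustration, not generated code)
// using the ApplicationACLMapProto members defined above: build a message,
// serialize it, and parse it back. toByteArray() is inherited from the
// protobuf MessageLite base class; everything else appears in this file.
private static void aclMapRoundTripSketch() throws
    org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  ApplicationACLMapProto original = ApplicationACLMapProto.newBuilder()
      .setAccessType(ApplicationAccessTypeProto.APPACCESS_VIEW_APP)
      .setAcl("alice,admins")
      .build();
  byte[] wire = original.toByteArray();             // field 1 enum + field 2 string
  ApplicationACLMapProto parsed = ApplicationACLMapProto.parseFrom(wire);
  assert parsed.equals(original);                   // presence-aware equals()
  assert parsed.hashCode() == original.hashCode();  // memoized after first call
}
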
   */
  boolean hasNumActiveNms();
  /**
   * optional int32 num_active_nms = 3;
   * @return The numActiveNms.
   */
  int getNumActiveNms();

  /**
   * optional int32 num_lost_nms = 4;
   * @return Whether the numLostNms field is set.
   */
  boolean hasNumLostNms();
  /**
   * optional int32 num_lost_nms = 4;
   * @return The numLostNms.
   */
  int getNumLostNms();

  /**
   * optional int32 num_unhealthy_nms = 5;
   * @return Whether the numUnhealthyNms field is set.
   */
  boolean hasNumUnhealthyNms();
  /**
   * optional int32 num_unhealthy_nms = 5;
   * @return The numUnhealthyNms.
   */
  int getNumUnhealthyNms();

  /**
   * optional int32 num_rebooted_nms = 6;
   * @return Whether the numRebootedNms field is set.
   */
  boolean hasNumRebootedNms();
  /**
   * optional int32 num_rebooted_nms = 6;
   * @return The numRebootedNms.
   */
  int getNumRebootedNms();

  /**
   * optional int32 num_decommissioning_nms = 7;
   * @return Whether the numDecommissioningNms field is set.
   */
  boolean hasNumDecommissioningNms();
  /**
   * optional int32 num_decommissioning_nms = 7;
   * @return The numDecommissioningNms.
   */
  int getNumDecommissioningNms();

  /**
   * optional int32 num_shutdown_nms = 8;
   * @return Whether the numShutdownNms field is set.
   */
  boolean hasNumShutdownNms();
  /**
   * optional int32 num_shutdown_nms = 8;
   * @return The numShutdownNms.
   */
  int getNumShutdownNms();
}
/**
 * Protobuf type {@code hadoop.yarn.YarnClusterMetricsProto}
 */
public static final class YarnClusterMetricsProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.YarnClusterMetricsProto)
    YarnClusterMetricsProtoOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use YarnClusterMetricsProto.newBuilder() to construct.
  private YarnClusterMetricsProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private YarnClusterMetricsProto() {
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new YarnClusterMetricsProto();
  }

  @java.lang.Override
  public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_YarnClusterMetricsProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_YarnClusterMetricsProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.Builder.class);
  }

  private int bitField0_;
  public static final int NUM_NODE_MANAGERS_FIELD_NUMBER = 1;
  private int numNodeManagers_ = 0;
  /**
   * optional int32 num_node_managers = 1;
   * @return Whether the numNodeManagers field is set.
   */
  @java.lang.Override
  public boolean hasNumNodeManagers() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * optional int32 num_node_managers = 1;
   * @return The numNodeManagers.
*/ @java.lang.Override public int getNumNodeManagers() { return numNodeManagers_; } public static final int NUM_DECOMMISSIONED_NMS_FIELD_NUMBER = 2; private int numDecommissionedNms_ = 0; /** * optional int32 num_decommissioned_nms = 2; * @return Whether the numDecommissionedNms field is set. */ @java.lang.Override public boolean hasNumDecommissionedNms() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int32 num_decommissioned_nms = 2; * @return The numDecommissionedNms. */ @java.lang.Override public int getNumDecommissionedNms() { return numDecommissionedNms_; } public static final int NUM_ACTIVE_NMS_FIELD_NUMBER = 3; private int numActiveNms_ = 0; /** * optional int32 num_active_nms = 3; * @return Whether the numActiveNms field is set. */ @java.lang.Override public boolean hasNumActiveNms() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int32 num_active_nms = 3; * @return The numActiveNms. */ @java.lang.Override public int getNumActiveNms() { return numActiveNms_; } public static final int NUM_LOST_NMS_FIELD_NUMBER = 4; private int numLostNms_ = 0; /** * optional int32 num_lost_nms = 4; * @return Whether the numLostNms field is set. */ @java.lang.Override public boolean hasNumLostNms() { return ((bitField0_ & 0x00000008) != 0); } /** * optional int32 num_lost_nms = 4; * @return The numLostNms. */ @java.lang.Override public int getNumLostNms() { return numLostNms_; } public static final int NUM_UNHEALTHY_NMS_FIELD_NUMBER = 5; private int numUnhealthyNms_ = 0; /** * optional int32 num_unhealthy_nms = 5; * @return Whether the numUnhealthyNms field is set. */ @java.lang.Override public boolean hasNumUnhealthyNms() { return ((bitField0_ & 0x00000010) != 0); } /** * optional int32 num_unhealthy_nms = 5; * @return The numUnhealthyNms. */ @java.lang.Override public int getNumUnhealthyNms() { return numUnhealthyNms_; } public static final int NUM_REBOOTED_NMS_FIELD_NUMBER = 6; private int numRebootedNms_ = 0; /** * optional int32 num_rebooted_nms = 6; * @return Whether the numRebootedNms field is set. */ @java.lang.Override public boolean hasNumRebootedNms() { return ((bitField0_ & 0x00000020) != 0); } /** * optional int32 num_rebooted_nms = 6; * @return The numRebootedNms. */ @java.lang.Override public int getNumRebootedNms() { return numRebootedNms_; } public static final int NUM_DECOMMISSIONING_NMS_FIELD_NUMBER = 7; private int numDecommissioningNms_ = 0; /** * optional int32 num_decommissioning_nms = 7; * @return Whether the numDecommissioningNms field is set. */ @java.lang.Override public boolean hasNumDecommissioningNms() { return ((bitField0_ & 0x00000040) != 0); } /** * optional int32 num_decommissioning_nms = 7; * @return The numDecommissioningNms. */ @java.lang.Override public int getNumDecommissioningNms() { return numDecommissioningNms_; } public static final int NUM_SHUTDOWN_NMS_FIELD_NUMBER = 8; private int numShutdownNms_ = 0; /** * optional int32 num_shutdown_nms = 8; * @return Whether the numShutdownNms field is set. */ @java.lang.Override public boolean hasNumShutdownNms() { return ((bitField0_ & 0x00000080) != 0); } /** * optional int32 num_shutdown_nms = 8; * @return The numShutdownNms. 
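
// A reading aid for the bitField0_ masks used above (editorial sketch, not
// generated code): each optional field is assigned the next power of two in
// declaration order, so in this message mask 0x00000001 tracks field 1
// (num_node_managers) through mask 0x00000080 for field 8 (num_shutdown_nms).
private static int presenceMaskSketch(int fieldIndexStartingAtOne) {
  // field 1 -> 0x1, field 2 -> 0x2, field 3 -> 0x4, ..., field 8 -> 0x80
  return 1 << (fieldIndexStartingAtOne - 1);
}
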
*/ @java.lang.Override public int getNumShutdownNms() { return numShutdownNms_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeInt32(1, numNodeManagers_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt32(2, numDecommissionedNms_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeInt32(3, numActiveNms_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeInt32(4, numLostNms_); } if (((bitField0_ & 0x00000010) != 0)) { output.writeInt32(5, numUnhealthyNms_); } if (((bitField0_ & 0x00000020) != 0)) { output.writeInt32(6, numRebootedNms_); } if (((bitField0_ & 0x00000040) != 0)) { output.writeInt32(7, numDecommissioningNms_); } if (((bitField0_ & 0x00000080) != 0)) { output.writeInt32(8, numShutdownNms_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(1, numNodeManagers_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(2, numDecommissionedNms_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(3, numActiveNms_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(4, numLostNms_); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(5, numUnhealthyNms_); } if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(6, numRebootedNms_); } if (((bitField0_ & 0x00000040) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(7, numDecommissioningNms_); } if (((bitField0_ & 0x00000080) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(8, numShutdownNms_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto) obj; if (hasNumNodeManagers() != other.hasNumNodeManagers()) return false; if (hasNumNodeManagers()) { if (getNumNodeManagers() != other.getNumNodeManagers()) return false; } if (hasNumDecommissionedNms() != other.hasNumDecommissionedNms()) return false; if (hasNumDecommissionedNms()) { if (getNumDecommissionedNms() != other.getNumDecommissionedNms()) return false; } if (hasNumActiveNms() != other.hasNumActiveNms()) return false; if (hasNumActiveNms()) { if (getNumActiveNms() != other.getNumActiveNms()) return false; } if (hasNumLostNms() != other.hasNumLostNms()) return false; if 
(hasNumLostNms()) { if (getNumLostNms() != other.getNumLostNms()) return false; } if (hasNumUnhealthyNms() != other.hasNumUnhealthyNms()) return false; if (hasNumUnhealthyNms()) { if (getNumUnhealthyNms() != other.getNumUnhealthyNms()) return false; } if (hasNumRebootedNms() != other.hasNumRebootedNms()) return false; if (hasNumRebootedNms()) { if (getNumRebootedNms() != other.getNumRebootedNms()) return false; } if (hasNumDecommissioningNms() != other.hasNumDecommissioningNms()) return false; if (hasNumDecommissioningNms()) { if (getNumDecommissioningNms() != other.getNumDecommissioningNms()) return false; } if (hasNumShutdownNms() != other.hasNumShutdownNms()) return false; if (hasNumShutdownNms()) { if (getNumShutdownNms() != other.getNumShutdownNms()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasNumNodeManagers()) { hash = (37 * hash) + NUM_NODE_MANAGERS_FIELD_NUMBER; hash = (53 * hash) + getNumNodeManagers(); } if (hasNumDecommissionedNms()) { hash = (37 * hash) + NUM_DECOMMISSIONED_NMS_FIELD_NUMBER; hash = (53 * hash) + getNumDecommissionedNms(); } if (hasNumActiveNms()) { hash = (37 * hash) + NUM_ACTIVE_NMS_FIELD_NUMBER; hash = (53 * hash) + getNumActiveNms(); } if (hasNumLostNms()) { hash = (37 * hash) + NUM_LOST_NMS_FIELD_NUMBER; hash = (53 * hash) + getNumLostNms(); } if (hasNumUnhealthyNms()) { hash = (37 * hash) + NUM_UNHEALTHY_NMS_FIELD_NUMBER; hash = (53 * hash) + getNumUnhealthyNms(); } if (hasNumRebootedNms()) { hash = (37 * hash) + NUM_REBOOTED_NMS_FIELD_NUMBER; hash = (53 * hash) + getNumRebootedNms(); } if (hasNumDecommissioningNms()) { hash = (37 * hash) + NUM_DECOMMISSIONING_NMS_FIELD_NUMBER; hash = (53 * hash) + getNumDecommissioningNms(); } if (hasNumShutdownNms()) { hash = (37 * hash) + NUM_SHUTDOWN_NMS_FIELD_NUMBER; hash = (53 * hash) + getNumShutdownNms(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom(byte[] data) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.YarnClusterMetricsProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.YarnClusterMetricsProto) org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_YarnClusterMetricsProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_YarnClusterMetricsProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; numNodeManagers_ = 0; numDecommissionedNms_ = 0; numActiveNms_ = 0; numLostNms_ = 0; numUnhealthyNms_ = 0; numRebootedNms_ = 0; numDecommissioningNms_ = 0; numShutdownNms_ = 0; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_YarnClusterMetricsProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto build() { org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto result = new org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.numNodeManagers_ = numNodeManagers_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.numDecommissionedNms_ = numDecommissionedNms_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.numActiveNms_ = numActiveNms_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { 
result.numLostNms_ = numLostNms_; to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.numUnhealthyNms_ = numUnhealthyNms_; to_bitField0_ |= 0x00000010; } if (((from_bitField0_ & 0x00000020) != 0)) { result.numRebootedNms_ = numRebootedNms_; to_bitField0_ |= 0x00000020; } if (((from_bitField0_ & 0x00000040) != 0)) { result.numDecommissioningNms_ = numDecommissioningNms_; to_bitField0_ |= 0x00000040; } if (((from_bitField0_ & 0x00000080) != 0)) { result.numShutdownNms_ = numShutdownNms_; to_bitField0_ |= 0x00000080; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.getDefaultInstance()) return this; if (other.hasNumNodeManagers()) { setNumNodeManagers(other.getNumNodeManagers()); } if (other.hasNumDecommissionedNms()) { setNumDecommissionedNms(other.getNumDecommissionedNms()); } if (other.hasNumActiveNms()) { setNumActiveNms(other.getNumActiveNms()); } if (other.hasNumLostNms()) { setNumLostNms(other.getNumLostNms()); } if (other.hasNumUnhealthyNms()) { setNumUnhealthyNms(other.getNumUnhealthyNms()); } if (other.hasNumRebootedNms()) { setNumRebootedNms(other.getNumRebootedNms()); } if (other.hasNumDecommissioningNms()) { setNumDecommissioningNms(other.getNumDecommissioningNms()); } if (other.hasNumShutdownNms()) { setNumShutdownNms(other.getNumShutdownNms()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { numNodeManagers_ = input.readInt32(); bitField0_ |= 0x00000001; break; } // case 8 case 16: { numDecommissionedNms_ = 
input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 24: { numActiveNms_ = input.readInt32(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { numLostNms_ = input.readInt32(); bitField0_ |= 0x00000008; break; } // case 32 case 40: { numUnhealthyNms_ = input.readInt32(); bitField0_ |= 0x00000010; break; } // case 40 case 48: { numRebootedNms_ = input.readInt32(); bitField0_ |= 0x00000020; break; } // case 48 case 56: { numDecommissioningNms_ = input.readInt32(); bitField0_ |= 0x00000040; break; } // case 56 case 64: { numShutdownNms_ = input.readInt32(); bitField0_ |= 0x00000080; break; } // case 64 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int numNodeManagers_ ; /** * optional int32 num_node_managers = 1; * @return Whether the numNodeManagers field is set. */ @java.lang.Override public boolean hasNumNodeManagers() { return ((bitField0_ & 0x00000001) != 0); } /** * optional int32 num_node_managers = 1; * @return The numNodeManagers. */ @java.lang.Override public int getNumNodeManagers() { return numNodeManagers_; } /** * optional int32 num_node_managers = 1; * @param value The numNodeManagers to set. * @return This builder for chaining. */ public Builder setNumNodeManagers(int value) { numNodeManagers_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional int32 num_node_managers = 1; * @return This builder for chaining. */ public Builder clearNumNodeManagers() { bitField0_ = (bitField0_ & ~0x00000001); numNodeManagers_ = 0; onChanged(); return this; } private int numDecommissionedNms_ ; /** * optional int32 num_decommissioned_nms = 2; * @return Whether the numDecommissionedNms field is set. */ @java.lang.Override public boolean hasNumDecommissionedNms() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int32 num_decommissioned_nms = 2; * @return The numDecommissionedNms. */ @java.lang.Override public int getNumDecommissionedNms() { return numDecommissionedNms_; } /** * optional int32 num_decommissioned_nms = 2; * @param value The numDecommissionedNms to set. * @return This builder for chaining. */ public Builder setNumDecommissionedNms(int value) { numDecommissionedNms_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional int32 num_decommissioned_nms = 2; * @return This builder for chaining. */ public Builder clearNumDecommissionedNms() { bitField0_ = (bitField0_ & ~0x00000002); numDecommissionedNms_ = 0; onChanged(); return this; } private int numActiveNms_ ; /** * optional int32 num_active_nms = 3; * @return Whether the numActiveNms field is set. */ @java.lang.Override public boolean hasNumActiveNms() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int32 num_active_nms = 3; * @return The numActiveNms. */ @java.lang.Override public int getNumActiveNms() { return numActiveNms_; } /** * optional int32 num_active_nms = 3; * @param value The numActiveNms to set. * @return This builder for chaining. */ public Builder setNumActiveNms(int value) { numActiveNms_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional int32 num_active_nms = 3; * @return This builder for chaining. 
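
// How the case labels in the mergeFrom(CodedInputStream) switch above are
// derived (editorial worked note, not generated code): a protobuf tag is
// (field_number << 3) | wire_type, and int32 fields use wire type 0 (varint),
// so field 1 parses under case 8, field 2 under case 16, up to field 8 under
// case 64; readTag() returning 0 signals end of input.
private static int tagForVarintFieldSketch(int fieldNumber) {
  final int WIRETYPE_VARINT = 0;  // wire type 0: varint-encoded scalar
  return (fieldNumber << 3) | WIRETYPE_VARINT;
}
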
*/ public Builder clearNumActiveNms() { bitField0_ = (bitField0_ & ~0x00000004); numActiveNms_ = 0; onChanged(); return this; } private int numLostNms_ ; /** * optional int32 num_lost_nms = 4; * @return Whether the numLostNms field is set. */ @java.lang.Override public boolean hasNumLostNms() { return ((bitField0_ & 0x00000008) != 0); } /** * optional int32 num_lost_nms = 4; * @return The numLostNms. */ @java.lang.Override public int getNumLostNms() { return numLostNms_; } /** * optional int32 num_lost_nms = 4; * @param value The numLostNms to set. * @return This builder for chaining. */ public Builder setNumLostNms(int value) { numLostNms_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional int32 num_lost_nms = 4; * @return This builder for chaining. */ public Builder clearNumLostNms() { bitField0_ = (bitField0_ & ~0x00000008); numLostNms_ = 0; onChanged(); return this; } private int numUnhealthyNms_ ; /** * optional int32 num_unhealthy_nms = 5; * @return Whether the numUnhealthyNms field is set. */ @java.lang.Override public boolean hasNumUnhealthyNms() { return ((bitField0_ & 0x00000010) != 0); } /** * optional int32 num_unhealthy_nms = 5; * @return The numUnhealthyNms. */ @java.lang.Override public int getNumUnhealthyNms() { return numUnhealthyNms_; } /** * optional int32 num_unhealthy_nms = 5; * @param value The numUnhealthyNms to set. * @return This builder for chaining. */ public Builder setNumUnhealthyNms(int value) { numUnhealthyNms_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional int32 num_unhealthy_nms = 5; * @return This builder for chaining. */ public Builder clearNumUnhealthyNms() { bitField0_ = (bitField0_ & ~0x00000010); numUnhealthyNms_ = 0; onChanged(); return this; } private int numRebootedNms_ ; /** * optional int32 num_rebooted_nms = 6; * @return Whether the numRebootedNms field is set. */ @java.lang.Override public boolean hasNumRebootedNms() { return ((bitField0_ & 0x00000020) != 0); } /** * optional int32 num_rebooted_nms = 6; * @return The numRebootedNms. */ @java.lang.Override public int getNumRebootedNms() { return numRebootedNms_; } /** * optional int32 num_rebooted_nms = 6; * @param value The numRebootedNms to set. * @return This builder for chaining. */ public Builder setNumRebootedNms(int value) { numRebootedNms_ = value; bitField0_ |= 0x00000020; onChanged(); return this; } /** * optional int32 num_rebooted_nms = 6; * @return This builder for chaining. */ public Builder clearNumRebootedNms() { bitField0_ = (bitField0_ & ~0x00000020); numRebootedNms_ = 0; onChanged(); return this; } private int numDecommissioningNms_ ; /** * optional int32 num_decommissioning_nms = 7; * @return Whether the numDecommissioningNms field is set. */ @java.lang.Override public boolean hasNumDecommissioningNms() { return ((bitField0_ & 0x00000040) != 0); } /** * optional int32 num_decommissioning_nms = 7; * @return The numDecommissioningNms. */ @java.lang.Override public int getNumDecommissioningNms() { return numDecommissioningNms_; } /** * optional int32 num_decommissioning_nms = 7; * @param value The numDecommissioningNms to set. * @return This builder for chaining. */ public Builder setNumDecommissioningNms(int value) { numDecommissioningNms_ = value; bitField0_ |= 0x00000040; onChanged(); return this; } /** * optional int32 num_decommissioning_nms = 7; * @return This builder for chaining. 
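
// A minimal consumer sketch for the metrics message built here (hypothetical
// helper, an editorial illustration): guard every read with the generated
// has*() method, since an unset optional int32 silently reads as 0.
private static int unavailableNodeManagersSketch(YarnClusterMetricsProto m) {
  int unavailable = 0;
  if (m.hasNumLostNms())      unavailable += m.getNumLostNms();
  if (m.hasNumUnhealthyNms()) unavailable += m.getNumUnhealthyNms();
  if (m.hasNumRebootedNms())  unavailable += m.getNumRebootedNms();
  if (m.hasNumShutdownNms())  unavailable += m.getNumShutdownNms();
  return unavailable;
}
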
*/ public Builder clearNumDecommissioningNms() { bitField0_ = (bitField0_ & ~0x00000040); numDecommissioningNms_ = 0; onChanged(); return this; } private int numShutdownNms_ ; /** * optional int32 num_shutdown_nms = 8; * @return Whether the numShutdownNms field is set. */ @java.lang.Override public boolean hasNumShutdownNms() { return ((bitField0_ & 0x00000080) != 0); } /** * optional int32 num_shutdown_nms = 8; * @return The numShutdownNms. */ @java.lang.Override public int getNumShutdownNms() { return numShutdownNms_; } /** * optional int32 num_shutdown_nms = 8; * @param value The numShutdownNms to set. * @return This builder for chaining. */ public Builder setNumShutdownNms(int value) { numShutdownNms_ = value; bitField0_ |= 0x00000080; onChanged(); return this; } /** * optional int32 num_shutdown_nms = 8; * @return This builder for chaining. */ public Builder clearNumShutdownNms() { bitField0_ = (bitField0_ & ~0x00000080); numShutdownNms_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.YarnClusterMetricsProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.YarnClusterMetricsProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public YarnClusterMetricsProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface QueueStatisticsProtoOrBuilder extends // 
@@protoc_insertion_point(interface_extends:hadoop.yarn.QueueStatisticsProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional int64 numAppsSubmitted = 1; * @return Whether the numAppsSubmitted field is set. */ boolean hasNumAppsSubmitted(); /** * optional int64 numAppsSubmitted = 1; * @return The numAppsSubmitted. */ long getNumAppsSubmitted(); /** * optional int64 numAppsRunning = 2; * @return Whether the numAppsRunning field is set. */ boolean hasNumAppsRunning(); /** * optional int64 numAppsRunning = 2; * @return The numAppsRunning. */ long getNumAppsRunning(); /** * optional int64 numAppsPending = 3; * @return Whether the numAppsPending field is set. */ boolean hasNumAppsPending(); /** * optional int64 numAppsPending = 3; * @return The numAppsPending. */ long getNumAppsPending(); /** * optional int64 numAppsCompleted = 4; * @return Whether the numAppsCompleted field is set. */ boolean hasNumAppsCompleted(); /** * optional int64 numAppsCompleted = 4; * @return The numAppsCompleted. */ long getNumAppsCompleted(); /** * optional int64 numAppsKilled = 5; * @return Whether the numAppsKilled field is set. */ boolean hasNumAppsKilled(); /** * optional int64 numAppsKilled = 5; * @return The numAppsKilled. */ long getNumAppsKilled(); /** * optional int64 numAppsFailed = 6; * @return Whether the numAppsFailed field is set. */ boolean hasNumAppsFailed(); /** * optional int64 numAppsFailed = 6; * @return The numAppsFailed. */ long getNumAppsFailed(); /** * optional int64 numActiveUsers = 7; * @return Whether the numActiveUsers field is set. */ boolean hasNumActiveUsers(); /** * optional int64 numActiveUsers = 7; * @return The numActiveUsers. */ long getNumActiveUsers(); /** * optional int64 availableMemoryMB = 8; * @return Whether the availableMemoryMB field is set. */ boolean hasAvailableMemoryMB(); /** * optional int64 availableMemoryMB = 8; * @return The availableMemoryMB. */ long getAvailableMemoryMB(); /** * optional int64 allocatedMemoryMB = 9; * @return Whether the allocatedMemoryMB field is set. */ boolean hasAllocatedMemoryMB(); /** * optional int64 allocatedMemoryMB = 9; * @return The allocatedMemoryMB. */ long getAllocatedMemoryMB(); /** * optional int64 pendingMemoryMB = 10; * @return Whether the pendingMemoryMB field is set. */ boolean hasPendingMemoryMB(); /** * optional int64 pendingMemoryMB = 10; * @return The pendingMemoryMB. */ long getPendingMemoryMB(); /** * optional int64 reservedMemoryMB = 11; * @return Whether the reservedMemoryMB field is set. */ boolean hasReservedMemoryMB(); /** * optional int64 reservedMemoryMB = 11; * @return The reservedMemoryMB. */ long getReservedMemoryMB(); /** * optional int64 availableVCores = 12; * @return Whether the availableVCores field is set. */ boolean hasAvailableVCores(); /** * optional int64 availableVCores = 12; * @return The availableVCores. */ long getAvailableVCores(); /** * optional int64 allocatedVCores = 13; * @return Whether the allocatedVCores field is set. */ boolean hasAllocatedVCores(); /** * optional int64 allocatedVCores = 13; * @return The allocatedVCores. */ long getAllocatedVCores(); /** * optional int64 pendingVCores = 14; * @return Whether the pendingVCores field is set. */ boolean hasPendingVCores(); /** * optional int64 pendingVCores = 14; * @return The pendingVCores. */ long getPendingVCores(); /** * optional int64 reservedVCores = 15; * @return Whether the reservedVCores field is set. */ boolean hasReservedVCores(); /** * optional int64 reservedVCores = 15; * @return The reservedVCores. 
*/ long getReservedVCores(); /** * optional int64 allocatedContainers = 16; * @return Whether the allocatedContainers field is set. */ boolean hasAllocatedContainers(); /** * optional int64 allocatedContainers = 16; * @return The allocatedContainers. */ long getAllocatedContainers(); /** * optional int64 pendingContainers = 17; * @return Whether the pendingContainers field is set. */ boolean hasPendingContainers(); /** * optional int64 pendingContainers = 17; * @return The pendingContainers. */ long getPendingContainers(); /** * optional int64 reservedContainers = 18; * @return Whether the reservedContainers field is set. */ boolean hasReservedContainers(); /** * optional int64 reservedContainers = 18; * @return The reservedContainers. */ long getReservedContainers(); } /** * Protobuf type {@code hadoop.yarn.QueueStatisticsProto} */ public static final class QueueStatisticsProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.QueueStatisticsProto) QueueStatisticsProtoOrBuilder { private static final long serialVersionUID = 0L; // Use QueueStatisticsProto.newBuilder() to construct. private QueueStatisticsProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private QueueStatisticsProto() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new QueueStatisticsProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueStatisticsProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueStatisticsProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.Builder.class); } private int bitField0_; public static final int NUMAPPSSUBMITTED_FIELD_NUMBER = 1; private long numAppsSubmitted_ = 0L; /** * optional int64 numAppsSubmitted = 1; * @return Whether the numAppsSubmitted field is set. */ @java.lang.Override public boolean hasNumAppsSubmitted() { return ((bitField0_ & 0x00000001) != 0); } /** * optional int64 numAppsSubmitted = 1; * @return The numAppsSubmitted. */ @java.lang.Override public long getNumAppsSubmitted() { return numAppsSubmitted_; } public static final int NUMAPPSRUNNING_FIELD_NUMBER = 2; private long numAppsRunning_ = 0L; /** * optional int64 numAppsRunning = 2; * @return Whether the numAppsRunning field is set. */ @java.lang.Override public boolean hasNumAppsRunning() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int64 numAppsRunning = 2; * @return The numAppsRunning. */ @java.lang.Override public long getNumAppsRunning() { return numAppsRunning_; } public static final int NUMAPPSPENDING_FIELD_NUMBER = 3; private long numAppsPending_ = 0L; /** * optional int64 numAppsPending = 3; * @return Whether the numAppsPending field is set. 
*/ @java.lang.Override public boolean hasNumAppsPending() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int64 numAppsPending = 3; * @return The numAppsPending. */ @java.lang.Override public long getNumAppsPending() { return numAppsPending_; } public static final int NUMAPPSCOMPLETED_FIELD_NUMBER = 4; private long numAppsCompleted_ = 0L; /** * optional int64 numAppsCompleted = 4; * @return Whether the numAppsCompleted field is set. */ @java.lang.Override public boolean hasNumAppsCompleted() { return ((bitField0_ & 0x00000008) != 0); } /** * optional int64 numAppsCompleted = 4; * @return The numAppsCompleted. */ @java.lang.Override public long getNumAppsCompleted() { return numAppsCompleted_; } public static final int NUMAPPSKILLED_FIELD_NUMBER = 5; private long numAppsKilled_ = 0L; /** * optional int64 numAppsKilled = 5; * @return Whether the numAppsKilled field is set. */ @java.lang.Override public boolean hasNumAppsKilled() { return ((bitField0_ & 0x00000010) != 0); } /** * optional int64 numAppsKilled = 5; * @return The numAppsKilled. */ @java.lang.Override public long getNumAppsKilled() { return numAppsKilled_; } public static final int NUMAPPSFAILED_FIELD_NUMBER = 6; private long numAppsFailed_ = 0L; /** * optional int64 numAppsFailed = 6; * @return Whether the numAppsFailed field is set. */ @java.lang.Override public boolean hasNumAppsFailed() { return ((bitField0_ & 0x00000020) != 0); } /** * optional int64 numAppsFailed = 6; * @return The numAppsFailed. */ @java.lang.Override public long getNumAppsFailed() { return numAppsFailed_; } public static final int NUMACTIVEUSERS_FIELD_NUMBER = 7; private long numActiveUsers_ = 0L; /** * optional int64 numActiveUsers = 7; * @return Whether the numActiveUsers field is set. */ @java.lang.Override public boolean hasNumActiveUsers() { return ((bitField0_ & 0x00000040) != 0); } /** * optional int64 numActiveUsers = 7; * @return The numActiveUsers. */ @java.lang.Override public long getNumActiveUsers() { return numActiveUsers_; } public static final int AVAILABLEMEMORYMB_FIELD_NUMBER = 8; private long availableMemoryMB_ = 0L; /** * optional int64 availableMemoryMB = 8; * @return Whether the availableMemoryMB field is set. */ @java.lang.Override public boolean hasAvailableMemoryMB() { return ((bitField0_ & 0x00000080) != 0); } /** * optional int64 availableMemoryMB = 8; * @return The availableMemoryMB. */ @java.lang.Override public long getAvailableMemoryMB() { return availableMemoryMB_; } public static final int ALLOCATEDMEMORYMB_FIELD_NUMBER = 9; private long allocatedMemoryMB_ = 0L; /** * optional int64 allocatedMemoryMB = 9; * @return Whether the allocatedMemoryMB field is set. */ @java.lang.Override public boolean hasAllocatedMemoryMB() { return ((bitField0_ & 0x00000100) != 0); } /** * optional int64 allocatedMemoryMB = 9; * @return The allocatedMemoryMB. */ @java.lang.Override public long getAllocatedMemoryMB() { return allocatedMemoryMB_; } public static final int PENDINGMEMORYMB_FIELD_NUMBER = 10; private long pendingMemoryMB_ = 0L; /** * optional int64 pendingMemoryMB = 10; * @return Whether the pendingMemoryMB field is set. */ @java.lang.Override public boolean hasPendingMemoryMB() { return ((bitField0_ & 0x00000200) != 0); } /** * optional int64 pendingMemoryMB = 10; * @return The pendingMemoryMB. 
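*
* <p>The memory gauges can be combined by the caller; a sketch, assuming a populated
* {@code stats} instance:
* <pre>{@code
* long demandMB = stats.getAllocatedMemoryMB()
*     + stats.getPendingMemoryMB()
*     + stats.getReservedMemoryMB();
* }</pre>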
*/ @java.lang.Override public long getPendingMemoryMB() { return pendingMemoryMB_; } public static final int RESERVEDMEMORYMB_FIELD_NUMBER = 11; private long reservedMemoryMB_ = 0L; /** * optional int64 reservedMemoryMB = 11; * @return Whether the reservedMemoryMB field is set. */ @java.lang.Override public boolean hasReservedMemoryMB() { return ((bitField0_ & 0x00000400) != 0); } /** * optional int64 reservedMemoryMB = 11; * @return The reservedMemoryMB. */ @java.lang.Override public long getReservedMemoryMB() { return reservedMemoryMB_; } public static final int AVAILABLEVCORES_FIELD_NUMBER = 12; private long availableVCores_ = 0L; /** * optional int64 availableVCores = 12; * @return Whether the availableVCores field is set. */ @java.lang.Override public boolean hasAvailableVCores() { return ((bitField0_ & 0x00000800) != 0); } /** * optional int64 availableVCores = 12; * @return The availableVCores. */ @java.lang.Override public long getAvailableVCores() { return availableVCores_; } public static final int ALLOCATEDVCORES_FIELD_NUMBER = 13; private long allocatedVCores_ = 0L; /** * optional int64 allocatedVCores = 13; * @return Whether the allocatedVCores field is set. */ @java.lang.Override public boolean hasAllocatedVCores() { return ((bitField0_ & 0x00001000) != 0); } /** * optional int64 allocatedVCores = 13; * @return The allocatedVCores. */ @java.lang.Override public long getAllocatedVCores() { return allocatedVCores_; } public static final int PENDINGVCORES_FIELD_NUMBER = 14; private long pendingVCores_ = 0L; /** * optional int64 pendingVCores = 14; * @return Whether the pendingVCores field is set. */ @java.lang.Override public boolean hasPendingVCores() { return ((bitField0_ & 0x00002000) != 0); } /** * optional int64 pendingVCores = 14; * @return The pendingVCores. */ @java.lang.Override public long getPendingVCores() { return pendingVCores_; } public static final int RESERVEDVCORES_FIELD_NUMBER = 15; private long reservedVCores_ = 0L; /** * optional int64 reservedVCores = 15; * @return Whether the reservedVCores field is set. */ @java.lang.Override public boolean hasReservedVCores() { return ((bitField0_ & 0x00004000) != 0); } /** * optional int64 reservedVCores = 15; * @return The reservedVCores. */ @java.lang.Override public long getReservedVCores() { return reservedVCores_; } public static final int ALLOCATEDCONTAINERS_FIELD_NUMBER = 16; private long allocatedContainers_ = 0L; /** * optional int64 allocatedContainers = 16; * @return Whether the allocatedContainers field is set. */ @java.lang.Override public boolean hasAllocatedContainers() { return ((bitField0_ & 0x00008000) != 0); } /** * optional int64 allocatedContainers = 16; * @return The allocatedContainers. */ @java.lang.Override public long getAllocatedContainers() { return allocatedContainers_; } public static final int PENDINGCONTAINERS_FIELD_NUMBER = 17; private long pendingContainers_ = 0L; /** * optional int64 pendingContainers = 17; * @return Whether the pendingContainers field is set. */ @java.lang.Override public boolean hasPendingContainers() { return ((bitField0_ & 0x00010000) != 0); } /** * optional int64 pendingContainers = 17; * @return The pendingContainers. */ @java.lang.Override public long getPendingContainers() { return pendingContainers_; } public static final int RESERVEDCONTAINERS_FIELD_NUMBER = 18; private long reservedContainers_ = 0L; /** * optional int64 reservedContainers = 18; * @return Whether the reservedContainers field is set. 
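*
* <p>A serialization round-trip sketch (assumes a populated {@code stats}; relies on
* the inherited {@code toByteArray()} and the {@code parseFrom(byte[])} defined below):
* <pre>{@code
* byte[] wire = stats.toByteArray();
* QueueStatisticsProto copy = QueueStatisticsProto.parseFrom(wire);
* assert copy.equals(stats);
* }</pre>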
*/ @java.lang.Override public boolean hasReservedContainers() { return ((bitField0_ & 0x00020000) != 0); } /** * optional int64 reservedContainers = 18; * @return The reservedContainers. */ @java.lang.Override public long getReservedContainers() { return reservedContainers_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeInt64(1, numAppsSubmitted_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt64(2, numAppsRunning_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeInt64(3, numAppsPending_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeInt64(4, numAppsCompleted_); } if (((bitField0_ & 0x00000010) != 0)) { output.writeInt64(5, numAppsKilled_); } if (((bitField0_ & 0x00000020) != 0)) { output.writeInt64(6, numAppsFailed_); } if (((bitField0_ & 0x00000040) != 0)) { output.writeInt64(7, numActiveUsers_); } if (((bitField0_ & 0x00000080) != 0)) { output.writeInt64(8, availableMemoryMB_); } if (((bitField0_ & 0x00000100) != 0)) { output.writeInt64(9, allocatedMemoryMB_); } if (((bitField0_ & 0x00000200) != 0)) { output.writeInt64(10, pendingMemoryMB_); } if (((bitField0_ & 0x00000400) != 0)) { output.writeInt64(11, reservedMemoryMB_); } if (((bitField0_ & 0x00000800) != 0)) { output.writeInt64(12, availableVCores_); } if (((bitField0_ & 0x00001000) != 0)) { output.writeInt64(13, allocatedVCores_); } if (((bitField0_ & 0x00002000) != 0)) { output.writeInt64(14, pendingVCores_); } if (((bitField0_ & 0x00004000) != 0)) { output.writeInt64(15, reservedVCores_); } if (((bitField0_ & 0x00008000) != 0)) { output.writeInt64(16, allocatedContainers_); } if (((bitField0_ & 0x00010000) != 0)) { output.writeInt64(17, pendingContainers_); } if (((bitField0_ & 0x00020000) != 0)) { output.writeInt64(18, reservedContainers_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(1, numAppsSubmitted_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(2, numAppsRunning_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(3, numAppsPending_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(4, numAppsCompleted_); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(5, numAppsKilled_); } if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(6, numAppsFailed_); } if (((bitField0_ & 0x00000040) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(7, numActiveUsers_); } if (((bitField0_ & 0x00000080) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(8, availableMemoryMB_); } if (((bitField0_ & 0x00000100) != 0)) { size 
+= org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(9, allocatedMemoryMB_);
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(10, pendingMemoryMB_);
      }
      if (((bitField0_ & 0x00000400) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(11, reservedMemoryMB_);
      }
      if (((bitField0_ & 0x00000800) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(12, availableVCores_);
      }
      if (((bitField0_ & 0x00001000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(13, allocatedVCores_);
      }
      if (((bitField0_ & 0x00002000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(14, pendingVCores_);
      }
      if (((bitField0_ & 0x00004000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(15, reservedVCores_);
      }
      if (((bitField0_ & 0x00008000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(16, allocatedContainers_);
      }
      if (((bitField0_ & 0x00010000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(17, pendingContainers_);
      }
      if (((bitField0_ & 0x00020000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(18, reservedContainers_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto other = (org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto) obj;

      if (hasNumAppsSubmitted() != other.hasNumAppsSubmitted()) return false;
      if (hasNumAppsSubmitted()) {
        if (getNumAppsSubmitted() != other.getNumAppsSubmitted()) return false;
      }
      if (hasNumAppsRunning() != other.hasNumAppsRunning()) return false;
      if (hasNumAppsRunning()) {
        if (getNumAppsRunning() != other.getNumAppsRunning()) return false;
      }
      if (hasNumAppsPending() != other.hasNumAppsPending()) return false;
      if (hasNumAppsPending()) {
        if (getNumAppsPending() != other.getNumAppsPending()) return false;
      }
      if (hasNumAppsCompleted() != other.hasNumAppsCompleted()) return false;
      if (hasNumAppsCompleted()) {
        if (getNumAppsCompleted() != other.getNumAppsCompleted()) return false;
      }
      if (hasNumAppsKilled() != other.hasNumAppsKilled()) return false;
      if (hasNumAppsKilled()) {
        if (getNumAppsKilled() != other.getNumAppsKilled()) return false;
      }
      if (hasNumAppsFailed() != other.hasNumAppsFailed()) return false;
      if (hasNumAppsFailed()) {
        if (getNumAppsFailed() != other.getNumAppsFailed()) return false;
      }
      if (hasNumActiveUsers() != other.hasNumActiveUsers()) return false;
      if (hasNumActiveUsers()) {
        if (getNumActiveUsers() != other.getNumActiveUsers()) return false;
      }
      if (hasAvailableMemoryMB() != other.hasAvailableMemoryMB()) return false;
      if (hasAvailableMemoryMB()) {
        if (getAvailableMemoryMB() != other.getAvailableMemoryMB()) return false;
      }
      if (hasAllocatedMemoryMB() != other.hasAllocatedMemoryMB()) return false;
      if (hasAllocatedMemoryMB()) {
        if (getAllocatedMemoryMB() != other.getAllocatedMemoryMB()) return false;
      }
      if (hasPendingMemoryMB() != other.hasPendingMemoryMB()) return false;
      if (hasPendingMemoryMB()) {
        if (getPendingMemoryMB() !=
other.getPendingMemoryMB()) return false; } if (hasReservedMemoryMB() != other.hasReservedMemoryMB()) return false; if (hasReservedMemoryMB()) { if (getReservedMemoryMB() != other.getReservedMemoryMB()) return false; } if (hasAvailableVCores() != other.hasAvailableVCores()) return false; if (hasAvailableVCores()) { if (getAvailableVCores() != other.getAvailableVCores()) return false; } if (hasAllocatedVCores() != other.hasAllocatedVCores()) return false; if (hasAllocatedVCores()) { if (getAllocatedVCores() != other.getAllocatedVCores()) return false; } if (hasPendingVCores() != other.hasPendingVCores()) return false; if (hasPendingVCores()) { if (getPendingVCores() != other.getPendingVCores()) return false; } if (hasReservedVCores() != other.hasReservedVCores()) return false; if (hasReservedVCores()) { if (getReservedVCores() != other.getReservedVCores()) return false; } if (hasAllocatedContainers() != other.hasAllocatedContainers()) return false; if (hasAllocatedContainers()) { if (getAllocatedContainers() != other.getAllocatedContainers()) return false; } if (hasPendingContainers() != other.hasPendingContainers()) return false; if (hasPendingContainers()) { if (getPendingContainers() != other.getPendingContainers()) return false; } if (hasReservedContainers() != other.hasReservedContainers()) return false; if (hasReservedContainers()) { if (getReservedContainers() != other.getReservedContainers()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasNumAppsSubmitted()) { hash = (37 * hash) + NUMAPPSSUBMITTED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getNumAppsSubmitted()); } if (hasNumAppsRunning()) { hash = (37 * hash) + NUMAPPSRUNNING_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getNumAppsRunning()); } if (hasNumAppsPending()) { hash = (37 * hash) + NUMAPPSPENDING_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getNumAppsPending()); } if (hasNumAppsCompleted()) { hash = (37 * hash) + NUMAPPSCOMPLETED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getNumAppsCompleted()); } if (hasNumAppsKilled()) { hash = (37 * hash) + NUMAPPSKILLED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getNumAppsKilled()); } if (hasNumAppsFailed()) { hash = (37 * hash) + NUMAPPSFAILED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getNumAppsFailed()); } if (hasNumActiveUsers()) { hash = (37 * hash) + NUMACTIVEUSERS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getNumActiveUsers()); } if (hasAvailableMemoryMB()) { hash = (37 * hash) + AVAILABLEMEMORYMB_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getAvailableMemoryMB()); } if (hasAllocatedMemoryMB()) { hash = (37 * hash) + ALLOCATEDMEMORYMB_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getAllocatedMemoryMB()); } if (hasPendingMemoryMB()) { hash = (37 * hash) + PENDINGMEMORYMB_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getPendingMemoryMB()); } if (hasReservedMemoryMB()) { hash = (37 * hash) 
+ RESERVEDMEMORYMB_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getReservedMemoryMB()); } if (hasAvailableVCores()) { hash = (37 * hash) + AVAILABLEVCORES_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getAvailableVCores()); } if (hasAllocatedVCores()) { hash = (37 * hash) + ALLOCATEDVCORES_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getAllocatedVCores()); } if (hasPendingVCores()) { hash = (37 * hash) + PENDINGVCORES_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getPendingVCores()); } if (hasReservedVCores()) { hash = (37 * hash) + RESERVEDVCORES_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getReservedVCores()); } if (hasAllocatedContainers()) { hash = (37 * hash) + ALLOCATEDCONTAINERS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getAllocatedContainers()); } if (hasPendingContainers()) { hash = (37 * hash) + PENDINGCONTAINERS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getPendingContainers()); } if (hasReservedContainers()) { hash = (37 * hash) + RESERVEDCONTAINERS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getReservedContainers()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom(java.io.InputStream input) throws java.io.IOException { return 
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.QueueStatisticsProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.QueueStatisticsProto)
        org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueStatisticsProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueStatisticsProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        numAppsSubmitted_ = 0L;
        numAppsRunning_ = 0L;
        numAppsPending_ = 0L;
        numAppsCompleted_ = 0L;
        numAppsKilled_ = 0L;
        numAppsFailed_ = 0L;
        numActiveUsers_ = 0L;
        availableMemoryMB_ = 0L;
        allocatedMemoryMB_ = 0L;
        pendingMemoryMB_ = 0L;
        reservedMemoryMB_ = 0L;
        availableVCores_ = 0L;
        allocatedVCores_ = 0L;
        pendingVCores_ = 0L;
        reservedVCores_ = 0L;
        allocatedContainers_ = 0L;
        pendingContainers_ = 0L;
        reservedContainers_ = 0L;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueStatisticsProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto result = new org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.numAppsSubmitted_ = numAppsSubmitted_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.numAppsRunning_ = numAppsRunning_;
to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.numAppsPending_ = numAppsPending_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.numAppsCompleted_ = numAppsCompleted_; to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.numAppsKilled_ = numAppsKilled_; to_bitField0_ |= 0x00000010; } if (((from_bitField0_ & 0x00000020) != 0)) { result.numAppsFailed_ = numAppsFailed_; to_bitField0_ |= 0x00000020; } if (((from_bitField0_ & 0x00000040) != 0)) { result.numActiveUsers_ = numActiveUsers_; to_bitField0_ |= 0x00000040; } if (((from_bitField0_ & 0x00000080) != 0)) { result.availableMemoryMB_ = availableMemoryMB_; to_bitField0_ |= 0x00000080; } if (((from_bitField0_ & 0x00000100) != 0)) { result.allocatedMemoryMB_ = allocatedMemoryMB_; to_bitField0_ |= 0x00000100; } if (((from_bitField0_ & 0x00000200) != 0)) { result.pendingMemoryMB_ = pendingMemoryMB_; to_bitField0_ |= 0x00000200; } if (((from_bitField0_ & 0x00000400) != 0)) { result.reservedMemoryMB_ = reservedMemoryMB_; to_bitField0_ |= 0x00000400; } if (((from_bitField0_ & 0x00000800) != 0)) { result.availableVCores_ = availableVCores_; to_bitField0_ |= 0x00000800; } if (((from_bitField0_ & 0x00001000) != 0)) { result.allocatedVCores_ = allocatedVCores_; to_bitField0_ |= 0x00001000; } if (((from_bitField0_ & 0x00002000) != 0)) { result.pendingVCores_ = pendingVCores_; to_bitField0_ |= 0x00002000; } if (((from_bitField0_ & 0x00004000) != 0)) { result.reservedVCores_ = reservedVCores_; to_bitField0_ |= 0x00004000; } if (((from_bitField0_ & 0x00008000) != 0)) { result.allocatedContainers_ = allocatedContainers_; to_bitField0_ |= 0x00008000; } if (((from_bitField0_ & 0x00010000) != 0)) { result.pendingContainers_ = pendingContainers_; to_bitField0_ |= 0x00010000; } if (((from_bitField0_ & 0x00020000) != 0)) { result.reservedContainers_ = reservedContainers_; to_bitField0_ |= 0x00020000; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto other) { if (other == 
org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.getDefaultInstance()) return this; if (other.hasNumAppsSubmitted()) { setNumAppsSubmitted(other.getNumAppsSubmitted()); } if (other.hasNumAppsRunning()) { setNumAppsRunning(other.getNumAppsRunning()); } if (other.hasNumAppsPending()) { setNumAppsPending(other.getNumAppsPending()); } if (other.hasNumAppsCompleted()) { setNumAppsCompleted(other.getNumAppsCompleted()); } if (other.hasNumAppsKilled()) { setNumAppsKilled(other.getNumAppsKilled()); } if (other.hasNumAppsFailed()) { setNumAppsFailed(other.getNumAppsFailed()); } if (other.hasNumActiveUsers()) { setNumActiveUsers(other.getNumActiveUsers()); } if (other.hasAvailableMemoryMB()) { setAvailableMemoryMB(other.getAvailableMemoryMB()); } if (other.hasAllocatedMemoryMB()) { setAllocatedMemoryMB(other.getAllocatedMemoryMB()); } if (other.hasPendingMemoryMB()) { setPendingMemoryMB(other.getPendingMemoryMB()); } if (other.hasReservedMemoryMB()) { setReservedMemoryMB(other.getReservedMemoryMB()); } if (other.hasAvailableVCores()) { setAvailableVCores(other.getAvailableVCores()); } if (other.hasAllocatedVCores()) { setAllocatedVCores(other.getAllocatedVCores()); } if (other.hasPendingVCores()) { setPendingVCores(other.getPendingVCores()); } if (other.hasReservedVCores()) { setReservedVCores(other.getReservedVCores()); } if (other.hasAllocatedContainers()) { setAllocatedContainers(other.getAllocatedContainers()); } if (other.hasPendingContainers()) { setPendingContainers(other.getPendingContainers()); } if (other.hasReservedContainers()) { setReservedContainers(other.getReservedContainers()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { numAppsSubmitted_ = input.readInt64(); bitField0_ |= 0x00000001; break; } // case 8 case 16: { numAppsRunning_ = input.readInt64(); bitField0_ |= 0x00000002; break; } // case 16 case 24: { numAppsPending_ = input.readInt64(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { numAppsCompleted_ = input.readInt64(); bitField0_ |= 0x00000008; break; } // case 32 case 40: { numAppsKilled_ = input.readInt64(); bitField0_ |= 0x00000010; break; } // case 40 case 48: { numAppsFailed_ = input.readInt64(); bitField0_ |= 0x00000020; break; } // case 48 case 56: { numActiveUsers_ = input.readInt64(); bitField0_ |= 0x00000040; break; } // case 56 case 64: { availableMemoryMB_ = input.readInt64(); bitField0_ |= 0x00000080; break; } // case 64 case 72: { allocatedMemoryMB_ = input.readInt64(); bitField0_ |= 0x00000100; break; } // case 72 case 80: { pendingMemoryMB_ = input.readInt64(); bitField0_ |= 0x00000200; break; } // case 80 case 88: { reservedMemoryMB_ = input.readInt64(); bitField0_ |= 0x00000400; break; } // case 88 case 96: { availableVCores_ = input.readInt64(); bitField0_ |= 0x00000800; break; } // case 96 case 104: { allocatedVCores_ = input.readInt64(); bitField0_ |= 0x00001000; break; } // case 104 case 112: { pendingVCores_ = input.readInt64(); bitField0_ |= 0x00002000; break; } // case 112 case 120: { reservedVCores_ = 
input.readInt64(); bitField0_ |= 0x00004000; break; } // case 120 case 128: { allocatedContainers_ = input.readInt64(); bitField0_ |= 0x00008000; break; } // case 128 case 136: { pendingContainers_ = input.readInt64(); bitField0_ |= 0x00010000; break; } // case 136 case 144: { reservedContainers_ = input.readInt64(); bitField0_ |= 0x00020000; break; } // case 144 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private long numAppsSubmitted_ ; /** * optional int64 numAppsSubmitted = 1; * @return Whether the numAppsSubmitted field is set. */ @java.lang.Override public boolean hasNumAppsSubmitted() { return ((bitField0_ & 0x00000001) != 0); } /** * optional int64 numAppsSubmitted = 1; * @return The numAppsSubmitted. */ @java.lang.Override public long getNumAppsSubmitted() { return numAppsSubmitted_; } /** * optional int64 numAppsSubmitted = 1; * @param value The numAppsSubmitted to set. * @return This builder for chaining. */ public Builder setNumAppsSubmitted(long value) { numAppsSubmitted_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional int64 numAppsSubmitted = 1; * @return This builder for chaining. */ public Builder clearNumAppsSubmitted() { bitField0_ = (bitField0_ & ~0x00000001); numAppsSubmitted_ = 0L; onChanged(); return this; } private long numAppsRunning_ ; /** * optional int64 numAppsRunning = 2; * @return Whether the numAppsRunning field is set. */ @java.lang.Override public boolean hasNumAppsRunning() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int64 numAppsRunning = 2; * @return The numAppsRunning. */ @java.lang.Override public long getNumAppsRunning() { return numAppsRunning_; } /** * optional int64 numAppsRunning = 2; * @param value The numAppsRunning to set. * @return This builder for chaining. */ public Builder setNumAppsRunning(long value) { numAppsRunning_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional int64 numAppsRunning = 2; * @return This builder for chaining. */ public Builder clearNumAppsRunning() { bitField0_ = (bitField0_ & ~0x00000002); numAppsRunning_ = 0L; onChanged(); return this; } private long numAppsPending_ ; /** * optional int64 numAppsPending = 3; * @return Whether the numAppsPending field is set. */ @java.lang.Override public boolean hasNumAppsPending() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int64 numAppsPending = 3; * @return The numAppsPending. */ @java.lang.Override public long getNumAppsPending() { return numAppsPending_; } /** * optional int64 numAppsPending = 3; * @param value The numAppsPending to set. * @return This builder for chaining. */ public Builder setNumAppsPending(long value) { numAppsPending_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional int64 numAppsPending = 3; * @return This builder for chaining. */ public Builder clearNumAppsPending() { bitField0_ = (bitField0_ & ~0x00000004); numAppsPending_ = 0L; onChanged(); return this; } private long numAppsCompleted_ ; /** * optional int64 numAppsCompleted = 4; * @return Whether the numAppsCompleted field is set. 
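*
* <p>A builder lifecycle sketch (illustrative value): setting a field raises its
* presence bit, clearing it drops the bit again.
* <pre>{@code
* QueueStatisticsProto.Builder b = QueueStatisticsProto.newBuilder();
* b.setNumAppsCompleted(10L);
* assert b.hasNumAppsCompleted();
* b.clearNumAppsCompleted();
* assert !b.hasNumAppsCompleted();
* }</pre>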
*/ @java.lang.Override public boolean hasNumAppsCompleted() { return ((bitField0_ & 0x00000008) != 0); } /** * optional int64 numAppsCompleted = 4; * @return The numAppsCompleted. */ @java.lang.Override public long getNumAppsCompleted() { return numAppsCompleted_; } /** * optional int64 numAppsCompleted = 4; * @param value The numAppsCompleted to set. * @return This builder for chaining. */ public Builder setNumAppsCompleted(long value) { numAppsCompleted_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional int64 numAppsCompleted = 4; * @return This builder for chaining. */ public Builder clearNumAppsCompleted() { bitField0_ = (bitField0_ & ~0x00000008); numAppsCompleted_ = 0L; onChanged(); return this; } private long numAppsKilled_ ; /** * optional int64 numAppsKilled = 5; * @return Whether the numAppsKilled field is set. */ @java.lang.Override public boolean hasNumAppsKilled() { return ((bitField0_ & 0x00000010) != 0); } /** * optional int64 numAppsKilled = 5; * @return The numAppsKilled. */ @java.lang.Override public long getNumAppsKilled() { return numAppsKilled_; } /** * optional int64 numAppsKilled = 5; * @param value The numAppsKilled to set. * @return This builder for chaining. */ public Builder setNumAppsKilled(long value) { numAppsKilled_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional int64 numAppsKilled = 5; * @return This builder for chaining. */ public Builder clearNumAppsKilled() { bitField0_ = (bitField0_ & ~0x00000010); numAppsKilled_ = 0L; onChanged(); return this; } private long numAppsFailed_ ; /** * optional int64 numAppsFailed = 6; * @return Whether the numAppsFailed field is set. */ @java.lang.Override public boolean hasNumAppsFailed() { return ((bitField0_ & 0x00000020) != 0); } /** * optional int64 numAppsFailed = 6; * @return The numAppsFailed. */ @java.lang.Override public long getNumAppsFailed() { return numAppsFailed_; } /** * optional int64 numAppsFailed = 6; * @param value The numAppsFailed to set. * @return This builder for chaining. */ public Builder setNumAppsFailed(long value) { numAppsFailed_ = value; bitField0_ |= 0x00000020; onChanged(); return this; } /** * optional int64 numAppsFailed = 6; * @return This builder for chaining. */ public Builder clearNumAppsFailed() { bitField0_ = (bitField0_ & ~0x00000020); numAppsFailed_ = 0L; onChanged(); return this; } private long numActiveUsers_ ; /** * optional int64 numActiveUsers = 7; * @return Whether the numActiveUsers field is set. */ @java.lang.Override public boolean hasNumActiveUsers() { return ((bitField0_ & 0x00000040) != 0); } /** * optional int64 numActiveUsers = 7; * @return The numActiveUsers. */ @java.lang.Override public long getNumActiveUsers() { return numActiveUsers_; } /** * optional int64 numActiveUsers = 7; * @param value The numActiveUsers to set. * @return This builder for chaining. */ public Builder setNumActiveUsers(long value) { numActiveUsers_ = value; bitField0_ |= 0x00000040; onChanged(); return this; } /** * optional int64 numActiveUsers = 7; * @return This builder for chaining. */ public Builder clearNumActiveUsers() { bitField0_ = (bitField0_ & ~0x00000040); numActiveUsers_ = 0L; onChanged(); return this; } private long availableMemoryMB_ ; /** * optional int64 availableMemoryMB = 8; * @return Whether the availableMemoryMB field is set. */ @java.lang.Override public boolean hasAvailableMemoryMB() { return ((bitField0_ & 0x00000080) != 0); } /** * optional int64 availableMemoryMB = 8; * @return The availableMemoryMB. 
*/ @java.lang.Override public long getAvailableMemoryMB() { return availableMemoryMB_; } /** * optional int64 availableMemoryMB = 8; * @param value The availableMemoryMB to set. * @return This builder for chaining. */ public Builder setAvailableMemoryMB(long value) { availableMemoryMB_ = value; bitField0_ |= 0x00000080; onChanged(); return this; } /** * optional int64 availableMemoryMB = 8; * @return This builder for chaining. */ public Builder clearAvailableMemoryMB() { bitField0_ = (bitField0_ & ~0x00000080); availableMemoryMB_ = 0L; onChanged(); return this; } private long allocatedMemoryMB_ ; /** * optional int64 allocatedMemoryMB = 9; * @return Whether the allocatedMemoryMB field is set. */ @java.lang.Override public boolean hasAllocatedMemoryMB() { return ((bitField0_ & 0x00000100) != 0); } /** * optional int64 allocatedMemoryMB = 9; * @return The allocatedMemoryMB. */ @java.lang.Override public long getAllocatedMemoryMB() { return allocatedMemoryMB_; } /** * optional int64 allocatedMemoryMB = 9; * @param value The allocatedMemoryMB to set. * @return This builder for chaining. */ public Builder setAllocatedMemoryMB(long value) { allocatedMemoryMB_ = value; bitField0_ |= 0x00000100; onChanged(); return this; } /** * optional int64 allocatedMemoryMB = 9; * @return This builder for chaining. */ public Builder clearAllocatedMemoryMB() { bitField0_ = (bitField0_ & ~0x00000100); allocatedMemoryMB_ = 0L; onChanged(); return this; } private long pendingMemoryMB_ ; /** * optional int64 pendingMemoryMB = 10; * @return Whether the pendingMemoryMB field is set. */ @java.lang.Override public boolean hasPendingMemoryMB() { return ((bitField0_ & 0x00000200) != 0); } /** * optional int64 pendingMemoryMB = 10; * @return The pendingMemoryMB. */ @java.lang.Override public long getPendingMemoryMB() { return pendingMemoryMB_; } /** * optional int64 pendingMemoryMB = 10; * @param value The pendingMemoryMB to set. * @return This builder for chaining. */ public Builder setPendingMemoryMB(long value) { pendingMemoryMB_ = value; bitField0_ |= 0x00000200; onChanged(); return this; } /** * optional int64 pendingMemoryMB = 10; * @return This builder for chaining. */ public Builder clearPendingMemoryMB() { bitField0_ = (bitField0_ & ~0x00000200); pendingMemoryMB_ = 0L; onChanged(); return this; } private long reservedMemoryMB_ ; /** * optional int64 reservedMemoryMB = 11; * @return Whether the reservedMemoryMB field is set. */ @java.lang.Override public boolean hasReservedMemoryMB() { return ((bitField0_ & 0x00000400) != 0); } /** * optional int64 reservedMemoryMB = 11; * @return The reservedMemoryMB. */ @java.lang.Override public long getReservedMemoryMB() { return reservedMemoryMB_; } /** * optional int64 reservedMemoryMB = 11; * @param value The reservedMemoryMB to set. * @return This builder for chaining. */ public Builder setReservedMemoryMB(long value) { reservedMemoryMB_ = value; bitField0_ |= 0x00000400; onChanged(); return this; } /** * optional int64 reservedMemoryMB = 11; * @return This builder for chaining. */ public Builder clearReservedMemoryMB() { bitField0_ = (bitField0_ & ~0x00000400); reservedMemoryMB_ = 0L; onChanged(); return this; } private long availableVCores_ ; /** * optional int64 availableVCores = 12; * @return Whether the availableVCores field is set. */ @java.lang.Override public boolean hasAvailableVCores() { return ((bitField0_ & 0x00000800) != 0); } /** * optional int64 availableVCores = 12; * @return The availableVCores. 
*/ @java.lang.Override public long getAvailableVCores() { return availableVCores_; } /** * optional int64 availableVCores = 12; * @param value The availableVCores to set. * @return This builder for chaining. */ public Builder setAvailableVCores(long value) { availableVCores_ = value; bitField0_ |= 0x00000800; onChanged(); return this; } /** * optional int64 availableVCores = 12; * @return This builder for chaining. */ public Builder clearAvailableVCores() { bitField0_ = (bitField0_ & ~0x00000800); availableVCores_ = 0L; onChanged(); return this; } private long allocatedVCores_ ; /** * optional int64 allocatedVCores = 13; * @return Whether the allocatedVCores field is set. */ @java.lang.Override public boolean hasAllocatedVCores() { return ((bitField0_ & 0x00001000) != 0); } /** * optional int64 allocatedVCores = 13; * @return The allocatedVCores. */ @java.lang.Override public long getAllocatedVCores() { return allocatedVCores_; } /** * optional int64 allocatedVCores = 13; * @param value The allocatedVCores to set. * @return This builder for chaining. */ public Builder setAllocatedVCores(long value) { allocatedVCores_ = value; bitField0_ |= 0x00001000; onChanged(); return this; } /** * optional int64 allocatedVCores = 13; * @return This builder for chaining. */ public Builder clearAllocatedVCores() { bitField0_ = (bitField0_ & ~0x00001000); allocatedVCores_ = 0L; onChanged(); return this; } private long pendingVCores_ ; /** * optional int64 pendingVCores = 14; * @return Whether the pendingVCores field is set. */ @java.lang.Override public boolean hasPendingVCores() { return ((bitField0_ & 0x00002000) != 0); } /** * optional int64 pendingVCores = 14; * @return The pendingVCores. */ @java.lang.Override public long getPendingVCores() { return pendingVCores_; } /** * optional int64 pendingVCores = 14; * @param value The pendingVCores to set. * @return This builder for chaining. */ public Builder setPendingVCores(long value) { pendingVCores_ = value; bitField0_ |= 0x00002000; onChanged(); return this; } /** * optional int64 pendingVCores = 14; * @return This builder for chaining. */ public Builder clearPendingVCores() { bitField0_ = (bitField0_ & ~0x00002000); pendingVCores_ = 0L; onChanged(); return this; } private long reservedVCores_ ; /** * optional int64 reservedVCores = 15; * @return Whether the reservedVCores field is set. */ @java.lang.Override public boolean hasReservedVCores() { return ((bitField0_ & 0x00004000) != 0); } /** * optional int64 reservedVCores = 15; * @return The reservedVCores. */ @java.lang.Override public long getReservedVCores() { return reservedVCores_; } /** * optional int64 reservedVCores = 15; * @param value The reservedVCores to set. * @return This builder for chaining. */ public Builder setReservedVCores(long value) { reservedVCores_ = value; bitField0_ |= 0x00004000; onChanged(); return this; } /** * optional int64 reservedVCores = 15; * @return This builder for chaining. */ public Builder clearReservedVCores() { bitField0_ = (bitField0_ & ~0x00004000); reservedVCores_ = 0L; onChanged(); return this; } private long allocatedContainers_ ; /** * optional int64 allocatedContainers = 16; * @return Whether the allocatedContainers field is set. */ @java.lang.Override public boolean hasAllocatedContainers() { return ((bitField0_ & 0x00008000) != 0); } /** * optional int64 allocatedContainers = 16; * @return The allocatedContainers. 
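*
* <p>Setters return {@code this}, so the container gauges can be populated in one
* fluent chain; a sketch with made-up numbers:
* <pre>{@code
* QueueStatisticsProto stats = QueueStatisticsProto.newBuilder()
*     .setAllocatedContainers(128L)
*     .setPendingContainers(16L)
*     .setReservedContainers(4L)
*     .build();
* }</pre>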
*/ @java.lang.Override public long getAllocatedContainers() { return allocatedContainers_; } /** * optional int64 allocatedContainers = 16; * @param value The allocatedContainers to set. * @return This builder for chaining. */ public Builder setAllocatedContainers(long value) { allocatedContainers_ = value; bitField0_ |= 0x00008000; onChanged(); return this; } /** * optional int64 allocatedContainers = 16; * @return This builder for chaining. */ public Builder clearAllocatedContainers() { bitField0_ = (bitField0_ & ~0x00008000); allocatedContainers_ = 0L; onChanged(); return this; } private long pendingContainers_ ; /** * optional int64 pendingContainers = 17; * @return Whether the pendingContainers field is set. */ @java.lang.Override public boolean hasPendingContainers() { return ((bitField0_ & 0x00010000) != 0); } /** * optional int64 pendingContainers = 17; * @return The pendingContainers. */ @java.lang.Override public long getPendingContainers() { return pendingContainers_; } /** * optional int64 pendingContainers = 17; * @param value The pendingContainers to set. * @return This builder for chaining. */ public Builder setPendingContainers(long value) { pendingContainers_ = value; bitField0_ |= 0x00010000; onChanged(); return this; } /** * optional int64 pendingContainers = 17; * @return This builder for chaining. */ public Builder clearPendingContainers() { bitField0_ = (bitField0_ & ~0x00010000); pendingContainers_ = 0L; onChanged(); return this; } private long reservedContainers_ ; /** * optional int64 reservedContainers = 18; * @return Whether the reservedContainers field is set. */ @java.lang.Override public boolean hasReservedContainers() { return ((bitField0_ & 0x00020000) != 0); } /** * optional int64 reservedContainers = 18; * @return The reservedContainers. */ @java.lang.Override public long getReservedContainers() { return reservedContainers_; } /** * optional int64 reservedContainers = 18; * @param value The reservedContainers to set. * @return This builder for chaining. */ public Builder setReservedContainers(long value) { reservedContainers_ = value; bitField0_ |= 0x00020000; onChanged(); return this; } /** * optional int64 reservedContainers = 18; * @return This builder for chaining. 
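*
* <p>Clearing restores the field default and unsets its presence bit; a sketch:
* <pre>{@code
* QueueStatisticsProto.Builder b =
*     QueueStatisticsProto.newBuilder().setReservedContainers(7L);
* b.clearReservedContainers();
* assert !b.hasReservedContainers() && b.getReservedContainers() == 0L;
* }</pre>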
*/
      public Builder clearReservedContainers() {
        bitField0_ = (bitField0_ & ~0x00020000);
        reservedContainers_ = 0L;
        onChanged();
        return this;
      }

      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.QueueStatisticsProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.QueueStatisticsProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<QueueStatisticsProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<QueueStatisticsProto>() {
      @java.lang.Override
      public QueueStatisticsProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<QueueStatisticsProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<QueueStatisticsProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface QueueInfoProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.QueueInfoProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * optional string queueName = 1;
     * @return Whether the queueName field is set.
     */
    boolean hasQueueName();
    /**
     * optional string queueName = 1;
     * @return The queueName.
     */
    java.lang.String getQueueName();
    /**
     * optional string queueName = 1;
     * @return The bytes for queueName.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueNameBytes();

    /**
     * optional float capacity = 2;
     * @return Whether the capacity field is set.
     */
    boolean hasCapacity();
    /**
     * optional float capacity = 2;
     * @return The capacity.
     */
    float getCapacity();

    /**
     * optional float maximumCapacity = 3;
     * @return Whether the maximumCapacity field is set.
     */
    boolean hasMaximumCapacity();
    /**
     * optional float maximumCapacity = 3;
     * @return The maximumCapacity.
     */
    float getMaximumCapacity();

    /**
     * optional float currentCapacity = 4;
     * @return Whether the currentCapacity field is set.
     */
    boolean hasCurrentCapacity();
    /**
     * optional float currentCapacity = 4;
     * @return The currentCapacity.
     */
    float getCurrentCapacity();

    /**
     * optional .hadoop.yarn.QueueStateProto state = 5;
     * @return Whether the state field is set.
     */
    boolean hasState();
    /**
     * optional .hadoop.yarn.QueueStateProto state = 5;
     * @return The state.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto getState();

    /**
     * repeated .hadoop.yarn.QueueInfoProto childQueues = 6;
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto>
        getChildQueuesList();
    /**
     * repeated .hadoop.yarn.QueueInfoProto childQueues = 6;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getChildQueues(int index);
    /**
     * repeated .hadoop.yarn.QueueInfoProto childQueues = 6;
     */
    int getChildQueuesCount();
    /**
     * repeated .hadoop.yarn.QueueInfoProto childQueues = 6;
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder>
        getChildQueuesOrBuilderList();
    /**
     * repeated .hadoop.yarn.QueueInfoProto childQueues = 6;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder getChildQueuesOrBuilder(
        int index);

    /**
     * repeated .hadoop.yarn.ApplicationReportProto applications = 7;
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto>
        getApplicationsList();
    /**
     * repeated .hadoop.yarn.ApplicationReportProto applications = 7;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplications(int index);
    /**
     * repeated .hadoop.yarn.ApplicationReportProto applications = 7;
     */
    int getApplicationsCount();
    /**
     * repeated .hadoop.yarn.ApplicationReportProto applications = 7;
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder>
        getApplicationsOrBuilderList();
    /**
     * repeated .hadoop.yarn.ApplicationReportProto applications = 7;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder getApplicationsOrBuilder(
        int index);

    /**
     * repeated string accessibleNodeLabels = 8;
     * @return A list containing the accessibleNodeLabels.
     */
    java.util.List<java.lang.String>
        getAccessibleNodeLabelsList();
    /**
     * repeated string accessibleNodeLabels = 8;
     * @return The count of accessibleNodeLabels.
     */
    int getAccessibleNodeLabelsCount();
    /**
     * repeated string accessibleNodeLabels = 8;
     * @param index The index of the element to return.
     * @return The accessibleNodeLabels at the given index.
     */
    java.lang.String getAccessibleNodeLabels(int index);
    /**
     * repeated string accessibleNodeLabels = 8;
     * @param index The index of the value to return.
     * @return The bytes of the accessibleNodeLabels at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getAccessibleNodeLabelsBytes(int index);

    /**
     * optional string defaultNodeLabelExpression = 9;
     * @return Whether the defaultNodeLabelExpression field is set.
     */
    boolean hasDefaultNodeLabelExpression();
    /**
     * optional string defaultNodeLabelExpression = 9;
     * @return The defaultNodeLabelExpression.
     */
    java.lang.String getDefaultNodeLabelExpression();
    /**
     * optional string defaultNodeLabelExpression = 9;
     * @return The bytes for defaultNodeLabelExpression.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDefaultNodeLabelExpressionBytes();

    /**
     * optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;
     * @return Whether the queueStatistics field is set.
     */
    boolean hasQueueStatistics();
    /**
     * optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;
     * @return The queueStatistics.
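     *
     * <p>Message-typed getters never return {@code null}; when the field is unset they
     * return the default instance, so guard with the hazzer. A sketch, assuming a
     * {@code QueueInfoProto info}:
     * <pre>{@code
     * if (info.hasQueueStatistics()) {
     *   long running = info.getQueueStatistics().getNumAppsRunning();
     * }
     * }</pre>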
*/
    org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto getQueueStatistics();
    /**
     * optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProtoOrBuilder getQueueStatisticsOrBuilder();

    /**
     * optional bool preemptionDisabled = 11;
     * @return Whether the preemptionDisabled field is set.
     */
    boolean hasPreemptionDisabled();
    /**
     * optional bool preemptionDisabled = 11;
     * @return The preemptionDisabled.
     */
    boolean getPreemptionDisabled();

    /**
     * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto>
        getQueueConfigurationsMapList();
    /**
     * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto getQueueConfigurationsMap(int index);
    /**
     * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;
     */
    int getQueueConfigurationsMapCount();
    /**
     * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder>
        getQueueConfigurationsMapOrBuilderList();
    /**
     * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder getQueueConfigurationsMapOrBuilder(
        int index);

    /**
     * optional bool intraQueuePreemptionDisabled = 13;
     * @return Whether the intraQueuePreemptionDisabled field is set.
     */
    boolean hasIntraQueuePreemptionDisabled();
    /**
     * optional bool intraQueuePreemptionDisabled = 13;
     * @return The intraQueuePreemptionDisabled.
     */
    boolean getIntraQueuePreemptionDisabled();

    /**
     * optional float weight = 14;
     * @return Whether the weight field is set.
     */
    boolean hasWeight();
    /**
     * optional float weight = 14;
     * @return The weight.
     */
    float getWeight();

    /**
     * optional string queuePath = 15;
     * @return Whether the queuePath field is set.
     */
    boolean hasQueuePath();
    /**
     * optional string queuePath = 15;
     * @return The queuePath.
     */
    java.lang.String getQueuePath();
    /**
     * optional string queuePath = 15;
     * @return The bytes for queuePath.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueuePathBytes();

    /**
     * optional int32 maxParallelApps = 16;
     * @return Whether the maxParallelApps field is set.
     */
    boolean hasMaxParallelApps();
    /**
     * optional int32 maxParallelApps = 16;
     * @return The maxParallelApps.
     */
    int getMaxParallelApps();

    /**
     * optional string schedulerType = 17;
     * @return Whether the schedulerType field is set.
     */
    boolean hasSchedulerType();
    /**
     * optional string schedulerType = 17;
     * @return The schedulerType.
     */
    java.lang.String getSchedulerType();
    /**
     * optional string schedulerType = 17;
     * @return The bytes for schedulerType.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getSchedulerTypeBytes();

    /**
     * optional int32 minResourceVCore = 18;
     * @return Whether the minResourceVCore field is set.
     */
    boolean hasMinResourceVCore();
    /**
     * optional int32 minResourceVCore = 18;
     * @return The minResourceVCore.
     */
    int getMinResourceVCore();

    /**
     * optional int64 minResourceMemory = 19;
     * @return Whether the minResourceMemory field is set.
     */
    boolean hasMinResourceMemory();
    /**
     * optional int64 minResourceMemory = 19;
     * @return The minResourceMemory.
     */
    long getMinResourceMemory();

    /**
     * optional int32 maxResourceVCore = 20;
     * @return Whether the maxResourceVCore field is set.
     */
    boolean hasMaxResourceVCore();
    /**
     * optional int32 maxResourceVCore = 20;
     * @return The maxResourceVCore.
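     *
     * <p>The resource bounds are flattened into scalar pairs (vcores plus memory); a
     * reading sketch, assuming a populated {@code QueueInfoProto info}:
     * <pre>{@code
     * int vcoreCap = info.hasMaxResourceVCore() ? info.getMaxResourceVCore() : Integer.MAX_VALUE;
     * long memCap = info.hasMaxResourceMemory() ? info.getMaxResourceMemory() : Long.MAX_VALUE;
     * }</pre>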
*/ int getMaxResourceVCore(); /** * optional int64 maxResourceMemory = 21; * @return Whether the maxResourceMemory field is set. */ boolean hasMaxResourceMemory(); /** * optional int64 maxResourceMemory = 21; * @return The maxResourceMemory. */ long getMaxResourceMemory(); /** * optional int32 reservedResourceVCore = 22; * @return Whether the reservedResourceVCore field is set. */ boolean hasReservedResourceVCore(); /** * optional int32 reservedResourceVCore = 22; * @return The reservedResourceVCore. */ int getReservedResourceVCore(); /** * optional int64 reservedResourceMemory = 23; * @return Whether the reservedResourceMemory field is set. */ boolean hasReservedResourceMemory(); /** * optional int64 reservedResourceMemory = 23; * @return The reservedResourceMemory. */ long getReservedResourceMemory(); /** * optional int32 steadyFairShareVCore = 24; * @return Whether the steadyFairShareVCore field is set. */ boolean hasSteadyFairShareVCore(); /** * optional int32 steadyFairShareVCore = 24; * @return The steadyFairShareVCore. */ int getSteadyFairShareVCore(); /** * optional int64 steadyFairShareMemory = 25; * @return Whether the steadyFairShareMemory field is set. */ boolean hasSteadyFairShareMemory(); /** * optional int64 steadyFairShareMemory = 25; * @return The steadyFairShareMemory. */ long getSteadyFairShareMemory(); /** * optional string subClusterId = 26; * @return Whether the subClusterId field is set. */ boolean hasSubClusterId(); /** * optional string subClusterId = 26; * @return The subClusterId. */ java.lang.String getSubClusterId(); /** * optional string subClusterId = 26; * @return The bytes for subClusterId. */ org.apache.hadoop.thirdparty.protobuf.ByteString getSubClusterIdBytes(); /** * optional int32 maxRunningApp = 27; * @return Whether the maxRunningApp field is set. */ boolean hasMaxRunningApp(); /** * optional int32 maxRunningApp = 27; * @return The maxRunningApp. */ int getMaxRunningApp(); } /** * Protobuf type {@code hadoop.yarn.QueueInfoProto} */ public static final class QueueInfoProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.QueueInfoProto) QueueInfoProtoOrBuilder { private static final long serialVersionUID = 0L; // Use QueueInfoProto.newBuilder() to construct. 
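  /*
   * Illustrative usage sketch (editor-added commentary; not emitted by protoc).
   * A QueueInfoProto is immutable and is assembled through its Builder, then
   * round-trips losslessly through the protobuf wire format. setQueueName(...)
   * is assumed from the standard protoc builder pattern; setCapacity(...) is
   * exercised by mergeFrom(...) later in this class, and parseFrom(byte[]) is
   * declared below.
   *
   *   YarnProtos.QueueInfoProto info = YarnProtos.QueueInfoProto.newBuilder()
   *       .setQueueName("default")   // field 1
   *       .setCapacity(0.5f)         // field 2
   *       .build();
   *   byte[] wire = info.toByteArray();
   *   YarnProtos.QueueInfoProto parsed = YarnProtos.QueueInfoProto.parseFrom(wire);
   *   assert parsed.equals(info);
   */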
  private QueueInfoProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private QueueInfoProto() {
    queueName_ = "";
    state_ = 1;
    childQueues_ = java.util.Collections.emptyList();
    applications_ = java.util.Collections.emptyList();
    accessibleNodeLabels_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
    defaultNodeLabelExpression_ = "";
    queueConfigurationsMap_ = java.util.Collections.emptyList();
    queuePath_ = "";
    schedulerType_ = "";
    subClusterId_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new QueueInfoProto();
  }

  @java.lang.Override
  public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueInfoProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueInfoProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder.class);
  }

  private int bitField0_;
  public static final int QUEUENAME_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private volatile java.lang.Object queueName_ = "";
  /**
   * optional string queueName = 1;
   * @return Whether the queueName field is set.
   */
  @java.lang.Override
  public boolean hasQueueName() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * optional string queueName = 1;
   * @return The queueName.
   */
  @java.lang.Override
  public java.lang.String getQueueName() {
    java.lang.Object ref = queueName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      org.apache.hadoop.thirdparty.protobuf.ByteString bs =
          (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (bs.isValidUtf8()) {
        queueName_ = s;
      }
      return s;
    }
  }
  /**
   * optional string queueName = 1;
   * @return The bytes for queueName.
   */
  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.ByteString
      getQueueNameBytes() {
    java.lang.Object ref = queueName_;
    if (ref instanceof java.lang.String) {
      org.apache.hadoop.thirdparty.protobuf.ByteString b =
          org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      queueName_ = b;
      return b;
    } else {
      return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
    }
  }

  public static final int CAPACITY_FIELD_NUMBER = 2;
  private float capacity_ = 0F;
  /**
   * optional float capacity = 2;
   * @return Whether the capacity field is set.
   */
  @java.lang.Override
  public boolean hasCapacity() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   * optional float capacity = 2;
   * @return The capacity.
   */
  @java.lang.Override
  public float getCapacity() {
    return capacity_;
  }

  public static final int MAXIMUMCAPACITY_FIELD_NUMBER = 3;
  private float maximumCapacity_ = 0F;
  /**
   * optional float maximumCapacity = 3;
   * @return Whether the maximumCapacity field is set.
   */
  @java.lang.Override
  public boolean hasMaximumCapacity() {
    return ((bitField0_ & 0x00000004) != 0);
  }
  /**
   * optional float maximumCapacity = 3;
   * @return The maximumCapacity.
   */
  @java.lang.Override
  public float getMaximumCapacity() {
    return maximumCapacity_;
  }

  public static final int CURRENTCAPACITY_FIELD_NUMBER = 4;
  private float currentCapacity_ = 0F;
  /**
   * optional float currentCapacity = 4;
   * @return Whether the currentCapacity field is set.
   */
  @java.lang.Override
  public boolean hasCurrentCapacity() {
    return ((bitField0_ & 0x00000008) != 0);
  }
  /**
   * optional float currentCapacity = 4;
   * @return The currentCapacity.
   */
  @java.lang.Override
  public float getCurrentCapacity() {
    return currentCapacity_;
  }

  public static final int STATE_FIELD_NUMBER = 5;
  private int state_ = 1;
  /**
   * optional .hadoop.yarn.QueueStateProto state = 5;
   * @return Whether the state field is set.
   */
  @java.lang.Override
  public boolean hasState() {
    return ((bitField0_ & 0x00000010) != 0);
  }
  /**
   * optional .hadoop.yarn.QueueStateProto state = 5;
   * @return The state.
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto getState() {
    org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto.forNumber(state_);
    return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto.Q_STOPPED : result;
  }

  public static final int CHILDQUEUES_FIELD_NUMBER = 6;
  @SuppressWarnings("serial")
  private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto> childQueues_;
  /**
   * repeated .hadoop.yarn.QueueInfoProto childQueues = 6;
   */
  @java.lang.Override
  public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto> getChildQueuesList() {
    return childQueues_;
  }
  /**
   * repeated .hadoop.yarn.QueueInfoProto childQueues = 6;
   */
  @java.lang.Override
  public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder>
      getChildQueuesOrBuilderList() {
    return childQueues_;
  }
  /**
   * repeated .hadoop.yarn.QueueInfoProto childQueues = 6;
   */
  @java.lang.Override
  public int getChildQueuesCount() {
    return childQueues_.size();
  }
  /**
   * repeated .hadoop.yarn.QueueInfoProto childQueues = 6;
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getChildQueues(int index) {
    return childQueues_.get(index);
  }
  /**
   * repeated .hadoop.yarn.QueueInfoProto childQueues = 6;
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder getChildQueuesOrBuilder(
      int index) {
    return childQueues_.get(index);
  }

  public static final int APPLICATIONS_FIELD_NUMBER = 7;
  @SuppressWarnings("serial")
  private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto> applications_;
  /**
   * repeated .hadoop.yarn.ApplicationReportProto applications = 7;
   */
  @java.lang.Override
  public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto> getApplicationsList() {
    return applications_;
  }
  /**
   * repeated .hadoop.yarn.ApplicationReportProto applications = 7;
   */
  @java.lang.Override
  public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder>
      getApplicationsOrBuilderList() {
    return applications_;
  }
  /**
   * repeated .hadoop.yarn.ApplicationReportProto applications = 7;
   */
  @java.lang.Override
  public int getApplicationsCount() {
    return applications_.size();
  }
  /**
   * repeated .hadoop.yarn.ApplicationReportProto applications = 7;
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplications(int index) {
    return applications_.get(index);
  }
  /**
   * repeated .hadoop.yarn.ApplicationReportProto applications = 7;
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder
getApplicationsOrBuilder( int index) { return applications_.get(index); } public static final int ACCESSIBLENODELABELS_FIELD_NUMBER = 8; @SuppressWarnings("serial") private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList accessibleNodeLabels_; /** * repeated string accessibleNodeLabels = 8; * @return A list containing the accessibleNodeLabels. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getAccessibleNodeLabelsList() { return accessibleNodeLabels_; } /** * repeated string accessibleNodeLabels = 8; * @return The count of accessibleNodeLabels. */ public int getAccessibleNodeLabelsCount() { return accessibleNodeLabels_.size(); } /** * repeated string accessibleNodeLabels = 8; * @param index The index of the element to return. * @return The accessibleNodeLabels at the given index. */ public java.lang.String getAccessibleNodeLabels(int index) { return accessibleNodeLabels_.get(index); } /** * repeated string accessibleNodeLabels = 8; * @param index The index of the value to return. * @return The bytes of the accessibleNodeLabels at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getAccessibleNodeLabelsBytes(int index) { return accessibleNodeLabels_.getByteString(index); } public static final int DEFAULTNODELABELEXPRESSION_FIELD_NUMBER = 9; @SuppressWarnings("serial") private volatile java.lang.Object defaultNodeLabelExpression_ = ""; /** * optional string defaultNodeLabelExpression = 9; * @return Whether the defaultNodeLabelExpression field is set. */ @java.lang.Override public boolean hasDefaultNodeLabelExpression() { return ((bitField0_ & 0x00000020) != 0); } /** * optional string defaultNodeLabelExpression = 9; * @return The defaultNodeLabelExpression. */ @java.lang.Override public java.lang.String getDefaultNodeLabelExpression() { java.lang.Object ref = defaultNodeLabelExpression_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { defaultNodeLabelExpression_ = s; } return s; } } /** * optional string defaultNodeLabelExpression = 9; * @return The bytes for defaultNodeLabelExpression. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getDefaultNodeLabelExpressionBytes() { java.lang.Object ref = defaultNodeLabelExpression_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); defaultNodeLabelExpression_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int QUEUESTATISTICS_FIELD_NUMBER = 10; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto queueStatistics_; /** * optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10; * @return Whether the queueStatistics field is set. */ @java.lang.Override public boolean hasQueueStatistics() { return ((bitField0_ & 0x00000040) != 0); } /** * optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10; * @return The queueStatistics. 
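   * <p>Illustrative note (editor-added; not protoc output): when the field is
   * unset this getter falls back to
   * {@code QueueStatisticsProto.getDefaultInstance()} rather than returning
   * {@code null}, so callers should gate on the hazzer:
   * <pre>{@code
   * if (info.hasQueueStatistics()) {
   *   YarnProtos.QueueStatisticsProto stats = info.getQueueStatistics();
   *   // ... consume per-queue statistics ...
   * }
   * }</pre>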
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto getQueueStatistics() { return queueStatistics_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.getDefaultInstance() : queueStatistics_; } /** * optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProtoOrBuilder getQueueStatisticsOrBuilder() { return queueStatistics_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.getDefaultInstance() : queueStatistics_; } public static final int PREEMPTIONDISABLED_FIELD_NUMBER = 11; private boolean preemptionDisabled_ = false; /** * optional bool preemptionDisabled = 11; * @return Whether the preemptionDisabled field is set. */ @java.lang.Override public boolean hasPreemptionDisabled() { return ((bitField0_ & 0x00000080) != 0); } /** * optional bool preemptionDisabled = 11; * @return The preemptionDisabled. */ @java.lang.Override public boolean getPreemptionDisabled() { return preemptionDisabled_; } public static final int QUEUECONFIGURATIONSMAP_FIELD_NUMBER = 12; @SuppressWarnings("serial") private java.util.List queueConfigurationsMap_; /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ @java.lang.Override public java.util.List getQueueConfigurationsMapList() { return queueConfigurationsMap_; } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ @java.lang.Override public java.util.List getQueueConfigurationsMapOrBuilderList() { return queueConfigurationsMap_; } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ @java.lang.Override public int getQueueConfigurationsMapCount() { return queueConfigurationsMap_.size(); } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto getQueueConfigurationsMap(int index) { return queueConfigurationsMap_.get(index); } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder getQueueConfigurationsMapOrBuilder( int index) { return queueConfigurationsMap_.get(index); } public static final int INTRAQUEUEPREEMPTIONDISABLED_FIELD_NUMBER = 13; private boolean intraQueuePreemptionDisabled_ = false; /** * optional bool intraQueuePreemptionDisabled = 13; * @return Whether the intraQueuePreemptionDisabled field is set. */ @java.lang.Override public boolean hasIntraQueuePreemptionDisabled() { return ((bitField0_ & 0x00000100) != 0); } /** * optional bool intraQueuePreemptionDisabled = 13; * @return The intraQueuePreemptionDisabled. */ @java.lang.Override public boolean getIntraQueuePreemptionDisabled() { return intraQueuePreemptionDisabled_; } public static final int WEIGHT_FIELD_NUMBER = 14; private float weight_ = 0F; /** * optional float weight = 14; * @return Whether the weight field is set. */ @java.lang.Override public boolean hasWeight() { return ((bitField0_ & 0x00000200) != 0); } /** * optional float weight = 14; * @return The weight. 
*/ @java.lang.Override public float getWeight() { return weight_; } public static final int QUEUEPATH_FIELD_NUMBER = 15; @SuppressWarnings("serial") private volatile java.lang.Object queuePath_ = ""; /** * optional string queuePath = 15; * @return Whether the queuePath field is set. */ @java.lang.Override public boolean hasQueuePath() { return ((bitField0_ & 0x00000400) != 0); } /** * optional string queuePath = 15; * @return The queuePath. */ @java.lang.Override public java.lang.String getQueuePath() { java.lang.Object ref = queuePath_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { queuePath_ = s; } return s; } } /** * optional string queuePath = 15; * @return The bytes for queuePath. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getQueuePathBytes() { java.lang.Object ref = queuePath_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); queuePath_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int MAXPARALLELAPPS_FIELD_NUMBER = 16; private int maxParallelApps_ = 0; /** * optional int32 maxParallelApps = 16; * @return Whether the maxParallelApps field is set. */ @java.lang.Override public boolean hasMaxParallelApps() { return ((bitField0_ & 0x00000800) != 0); } /** * optional int32 maxParallelApps = 16; * @return The maxParallelApps. */ @java.lang.Override public int getMaxParallelApps() { return maxParallelApps_; } public static final int SCHEDULERTYPE_FIELD_NUMBER = 17; @SuppressWarnings("serial") private volatile java.lang.Object schedulerType_ = ""; /** * optional string schedulerType = 17; * @return Whether the schedulerType field is set. */ @java.lang.Override public boolean hasSchedulerType() { return ((bitField0_ & 0x00001000) != 0); } /** * optional string schedulerType = 17; * @return The schedulerType. */ @java.lang.Override public java.lang.String getSchedulerType() { java.lang.Object ref = schedulerType_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { schedulerType_ = s; } return s; } } /** * optional string schedulerType = 17; * @return The bytes for schedulerType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getSchedulerTypeBytes() { java.lang.Object ref = schedulerType_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); schedulerType_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int MINRESOURCEVCORE_FIELD_NUMBER = 18; private int minResourceVCore_ = 0; /** * optional int32 minResourceVCore = 18; * @return Whether the minResourceVCore field is set. 
*/ @java.lang.Override public boolean hasMinResourceVCore() { return ((bitField0_ & 0x00002000) != 0); } /** * optional int32 minResourceVCore = 18; * @return The minResourceVCore. */ @java.lang.Override public int getMinResourceVCore() { return minResourceVCore_; } public static final int MINRESOURCEMEMORY_FIELD_NUMBER = 19; private long minResourceMemory_ = 0L; /** * optional int64 minResourceMemory = 19; * @return Whether the minResourceMemory field is set. */ @java.lang.Override public boolean hasMinResourceMemory() { return ((bitField0_ & 0x00004000) != 0); } /** * optional int64 minResourceMemory = 19; * @return The minResourceMemory. */ @java.lang.Override public long getMinResourceMemory() { return minResourceMemory_; } public static final int MAXRESOURCEVCORE_FIELD_NUMBER = 20; private int maxResourceVCore_ = 0; /** * optional int32 maxResourceVCore = 20; * @return Whether the maxResourceVCore field is set. */ @java.lang.Override public boolean hasMaxResourceVCore() { return ((bitField0_ & 0x00008000) != 0); } /** * optional int32 maxResourceVCore = 20; * @return The maxResourceVCore. */ @java.lang.Override public int getMaxResourceVCore() { return maxResourceVCore_; } public static final int MAXRESOURCEMEMORY_FIELD_NUMBER = 21; private long maxResourceMemory_ = 0L; /** * optional int64 maxResourceMemory = 21; * @return Whether the maxResourceMemory field is set. */ @java.lang.Override public boolean hasMaxResourceMemory() { return ((bitField0_ & 0x00010000) != 0); } /** * optional int64 maxResourceMemory = 21; * @return The maxResourceMemory. */ @java.lang.Override public long getMaxResourceMemory() { return maxResourceMemory_; } public static final int RESERVEDRESOURCEVCORE_FIELD_NUMBER = 22; private int reservedResourceVCore_ = 0; /** * optional int32 reservedResourceVCore = 22; * @return Whether the reservedResourceVCore field is set. */ @java.lang.Override public boolean hasReservedResourceVCore() { return ((bitField0_ & 0x00020000) != 0); } /** * optional int32 reservedResourceVCore = 22; * @return The reservedResourceVCore. */ @java.lang.Override public int getReservedResourceVCore() { return reservedResourceVCore_; } public static final int RESERVEDRESOURCEMEMORY_FIELD_NUMBER = 23; private long reservedResourceMemory_ = 0L; /** * optional int64 reservedResourceMemory = 23; * @return Whether the reservedResourceMemory field is set. */ @java.lang.Override public boolean hasReservedResourceMemory() { return ((bitField0_ & 0x00040000) != 0); } /** * optional int64 reservedResourceMemory = 23; * @return The reservedResourceMemory. */ @java.lang.Override public long getReservedResourceMemory() { return reservedResourceMemory_; } public static final int STEADYFAIRSHAREVCORE_FIELD_NUMBER = 24; private int steadyFairShareVCore_ = 0; /** * optional int32 steadyFairShareVCore = 24; * @return Whether the steadyFairShareVCore field is set. */ @java.lang.Override public boolean hasSteadyFairShareVCore() { return ((bitField0_ & 0x00080000) != 0); } /** * optional int32 steadyFairShareVCore = 24; * @return The steadyFairShareVCore. */ @java.lang.Override public int getSteadyFairShareVCore() { return steadyFairShareVCore_; } public static final int STEADYFAIRSHAREMEMORY_FIELD_NUMBER = 25; private long steadyFairShareMemory_ = 0L; /** * optional int64 steadyFairShareMemory = 25; * @return Whether the steadyFairShareMemory field is set. 
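   * <p>Illustrative note (editor-added; not protoc output): for optional scalar
   * fields the hazzer is the only way to distinguish "explicitly set to 0" from
   * "absent", since the getter returns the type default when unset. Here
   * {@code report} is a hypothetical QueueInfoProto instance:
   * <pre>{@code
   * long fairShare = report.hasSteadyFairShareMemory()
   *     ? report.getSteadyFairShareMemory()
   *     : -1L; // caller-chosen sentinel for "not reported"
   * }</pre>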
*/ @java.lang.Override public boolean hasSteadyFairShareMemory() { return ((bitField0_ & 0x00100000) != 0); } /** * optional int64 steadyFairShareMemory = 25; * @return The steadyFairShareMemory. */ @java.lang.Override public long getSteadyFairShareMemory() { return steadyFairShareMemory_; } public static final int SUBCLUSTERID_FIELD_NUMBER = 26; @SuppressWarnings("serial") private volatile java.lang.Object subClusterId_ = ""; /** * optional string subClusterId = 26; * @return Whether the subClusterId field is set. */ @java.lang.Override public boolean hasSubClusterId() { return ((bitField0_ & 0x00200000) != 0); } /** * optional string subClusterId = 26; * @return The subClusterId. */ @java.lang.Override public java.lang.String getSubClusterId() { java.lang.Object ref = subClusterId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { subClusterId_ = s; } return s; } } /** * optional string subClusterId = 26; * @return The bytes for subClusterId. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getSubClusterIdBytes() { java.lang.Object ref = subClusterId_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); subClusterId_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int MAXRUNNINGAPP_FIELD_NUMBER = 27; private int maxRunningApp_ = 0; /** * optional int32 maxRunningApp = 27; * @return Whether the maxRunningApp field is set. */ @java.lang.Override public boolean hasMaxRunningApp() { return ((bitField0_ & 0x00400000) != 0); } /** * optional int32 maxRunningApp = 27; * @return The maxRunningApp. 
*/ @java.lang.Override public int getMaxRunningApp() { return maxRunningApp_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getChildQueuesCount(); i++) { if (!getChildQueues(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getApplicationsCount(); i++) { if (!getApplications(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getQueueConfigurationsMapCount(); i++) { if (!getQueueConfigurationsMap(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, queueName_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeFloat(2, capacity_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeFloat(3, maximumCapacity_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeFloat(4, currentCapacity_); } if (((bitField0_ & 0x00000010) != 0)) { output.writeEnum(5, state_); } for (int i = 0; i < childQueues_.size(); i++) { output.writeMessage(6, childQueues_.get(i)); } for (int i = 0; i < applications_.size(); i++) { output.writeMessage(7, applications_.get(i)); } for (int i = 0; i < accessibleNodeLabels_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 8, accessibleNodeLabels_.getRaw(i)); } if (((bitField0_ & 0x00000020) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 9, defaultNodeLabelExpression_); } if (((bitField0_ & 0x00000040) != 0)) { output.writeMessage(10, getQueueStatistics()); } if (((bitField0_ & 0x00000080) != 0)) { output.writeBool(11, preemptionDisabled_); } for (int i = 0; i < queueConfigurationsMap_.size(); i++) { output.writeMessage(12, queueConfigurationsMap_.get(i)); } if (((bitField0_ & 0x00000100) != 0)) { output.writeBool(13, intraQueuePreemptionDisabled_); } if (((bitField0_ & 0x00000200) != 0)) { output.writeFloat(14, weight_); } if (((bitField0_ & 0x00000400) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 15, queuePath_); } if (((bitField0_ & 0x00000800) != 0)) { output.writeInt32(16, maxParallelApps_); } if (((bitField0_ & 0x00001000) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 17, schedulerType_); } if (((bitField0_ & 0x00002000) != 0)) { output.writeInt32(18, minResourceVCore_); } if (((bitField0_ & 0x00004000) != 0)) { output.writeInt64(19, minResourceMemory_); } if (((bitField0_ & 0x00008000) != 0)) { output.writeInt32(20, maxResourceVCore_); } if (((bitField0_ & 0x00010000) != 0)) { output.writeInt64(21, maxResourceMemory_); } if (((bitField0_ & 0x00020000) != 0)) { output.writeInt32(22, reservedResourceVCore_); } if (((bitField0_ & 0x00040000) != 0)) { output.writeInt64(23, reservedResourceMemory_); } if (((bitField0_ & 0x00080000) != 0)) { output.writeInt32(24, steadyFairShareVCore_); } if (((bitField0_ & 0x00100000) != 0)) { output.writeInt64(25, steadyFairShareMemory_); } if (((bitField0_ & 0x00200000) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 26, subClusterId_); } if 
(((bitField0_ & 0x00400000) != 0)) { output.writeInt32(27, maxRunningApp_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, queueName_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeFloatSize(2, capacity_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeFloatSize(3, maximumCapacity_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeFloatSize(4, currentCapacity_); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(5, state_); } for (int i = 0; i < childQueues_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(6, childQueues_.get(i)); } for (int i = 0; i < applications_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(7, applications_.get(i)); } { int dataSize = 0; for (int i = 0; i < accessibleNodeLabels_.size(); i++) { dataSize += computeStringSizeNoTag(accessibleNodeLabels_.getRaw(i)); } size += dataSize; size += 1 * getAccessibleNodeLabelsList().size(); } if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(9, defaultNodeLabelExpression_); } if (((bitField0_ & 0x00000040) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(10, getQueueStatistics()); } if (((bitField0_ & 0x00000080) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(11, preemptionDisabled_); } for (int i = 0; i < queueConfigurationsMap_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(12, queueConfigurationsMap_.get(i)); } if (((bitField0_ & 0x00000100) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBoolSize(13, intraQueuePreemptionDisabled_); } if (((bitField0_ & 0x00000200) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeFloatSize(14, weight_); } if (((bitField0_ & 0x00000400) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(15, queuePath_); } if (((bitField0_ & 0x00000800) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(16, maxParallelApps_); } if (((bitField0_ & 0x00001000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(17, schedulerType_); } if (((bitField0_ & 0x00002000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(18, minResourceVCore_); } if (((bitField0_ & 0x00004000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(19, minResourceMemory_); } if (((bitField0_ & 0x00008000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(20, maxResourceVCore_); } if (((bitField0_ & 0x00010000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(21, maxResourceMemory_); } if (((bitField0_ & 0x00020000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream 
.computeInt32Size(22, reservedResourceVCore_); } if (((bitField0_ & 0x00040000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(23, reservedResourceMemory_); } if (((bitField0_ & 0x00080000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(24, steadyFairShareVCore_); } if (((bitField0_ & 0x00100000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(25, steadyFairShareMemory_); } if (((bitField0_ & 0x00200000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(26, subClusterId_); } if (((bitField0_ & 0x00400000) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(27, maxRunningApp_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto) obj; if (hasQueueName() != other.hasQueueName()) return false; if (hasQueueName()) { if (!getQueueName() .equals(other.getQueueName())) return false; } if (hasCapacity() != other.hasCapacity()) return false; if (hasCapacity()) { if (java.lang.Float.floatToIntBits(getCapacity()) != java.lang.Float.floatToIntBits( other.getCapacity())) return false; } if (hasMaximumCapacity() != other.hasMaximumCapacity()) return false; if (hasMaximumCapacity()) { if (java.lang.Float.floatToIntBits(getMaximumCapacity()) != java.lang.Float.floatToIntBits( other.getMaximumCapacity())) return false; } if (hasCurrentCapacity() != other.hasCurrentCapacity()) return false; if (hasCurrentCapacity()) { if (java.lang.Float.floatToIntBits(getCurrentCapacity()) != java.lang.Float.floatToIntBits( other.getCurrentCapacity())) return false; } if (hasState() != other.hasState()) return false; if (hasState()) { if (state_ != other.state_) return false; } if (!getChildQueuesList() .equals(other.getChildQueuesList())) return false; if (!getApplicationsList() .equals(other.getApplicationsList())) return false; if (!getAccessibleNodeLabelsList() .equals(other.getAccessibleNodeLabelsList())) return false; if (hasDefaultNodeLabelExpression() != other.hasDefaultNodeLabelExpression()) return false; if (hasDefaultNodeLabelExpression()) { if (!getDefaultNodeLabelExpression() .equals(other.getDefaultNodeLabelExpression())) return false; } if (hasQueueStatistics() != other.hasQueueStatistics()) return false; if (hasQueueStatistics()) { if (!getQueueStatistics() .equals(other.getQueueStatistics())) return false; } if (hasPreemptionDisabled() != other.hasPreemptionDisabled()) return false; if (hasPreemptionDisabled()) { if (getPreemptionDisabled() != other.getPreemptionDisabled()) return false; } if (!getQueueConfigurationsMapList() .equals(other.getQueueConfigurationsMapList())) return false; if (hasIntraQueuePreemptionDisabled() != other.hasIntraQueuePreemptionDisabled()) return false; if (hasIntraQueuePreemptionDisabled()) { if (getIntraQueuePreemptionDisabled() != other.getIntraQueuePreemptionDisabled()) return false; } if (hasWeight() != other.hasWeight()) return false; if (hasWeight()) { if (java.lang.Float.floatToIntBits(getWeight()) != java.lang.Float.floatToIntBits( other.getWeight())) return false; } if 
(hasQueuePath() != other.hasQueuePath()) return false; if (hasQueuePath()) { if (!getQueuePath() .equals(other.getQueuePath())) return false; } if (hasMaxParallelApps() != other.hasMaxParallelApps()) return false; if (hasMaxParallelApps()) { if (getMaxParallelApps() != other.getMaxParallelApps()) return false; } if (hasSchedulerType() != other.hasSchedulerType()) return false; if (hasSchedulerType()) { if (!getSchedulerType() .equals(other.getSchedulerType())) return false; } if (hasMinResourceVCore() != other.hasMinResourceVCore()) return false; if (hasMinResourceVCore()) { if (getMinResourceVCore() != other.getMinResourceVCore()) return false; } if (hasMinResourceMemory() != other.hasMinResourceMemory()) return false; if (hasMinResourceMemory()) { if (getMinResourceMemory() != other.getMinResourceMemory()) return false; } if (hasMaxResourceVCore() != other.hasMaxResourceVCore()) return false; if (hasMaxResourceVCore()) { if (getMaxResourceVCore() != other.getMaxResourceVCore()) return false; } if (hasMaxResourceMemory() != other.hasMaxResourceMemory()) return false; if (hasMaxResourceMemory()) { if (getMaxResourceMemory() != other.getMaxResourceMemory()) return false; } if (hasReservedResourceVCore() != other.hasReservedResourceVCore()) return false; if (hasReservedResourceVCore()) { if (getReservedResourceVCore() != other.getReservedResourceVCore()) return false; } if (hasReservedResourceMemory() != other.hasReservedResourceMemory()) return false; if (hasReservedResourceMemory()) { if (getReservedResourceMemory() != other.getReservedResourceMemory()) return false; } if (hasSteadyFairShareVCore() != other.hasSteadyFairShareVCore()) return false; if (hasSteadyFairShareVCore()) { if (getSteadyFairShareVCore() != other.getSteadyFairShareVCore()) return false; } if (hasSteadyFairShareMemory() != other.hasSteadyFairShareMemory()) return false; if (hasSteadyFairShareMemory()) { if (getSteadyFairShareMemory() != other.getSteadyFairShareMemory()) return false; } if (hasSubClusterId() != other.hasSubClusterId()) return false; if (hasSubClusterId()) { if (!getSubClusterId() .equals(other.getSubClusterId())) return false; } if (hasMaxRunningApp() != other.hasMaxRunningApp()) return false; if (hasMaxRunningApp()) { if (getMaxRunningApp() != other.getMaxRunningApp()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasQueueName()) { hash = (37 * hash) + QUEUENAME_FIELD_NUMBER; hash = (53 * hash) + getQueueName().hashCode(); } if (hasCapacity()) { hash = (37 * hash) + CAPACITY_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits( getCapacity()); } if (hasMaximumCapacity()) { hash = (37 * hash) + MAXIMUMCAPACITY_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits( getMaximumCapacity()); } if (hasCurrentCapacity()) { hash = (37 * hash) + CURRENTCAPACITY_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits( getCurrentCapacity()); } if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; hash = (53 * hash) + state_; } if (getChildQueuesCount() > 0) { hash = (37 * hash) + CHILDQUEUES_FIELD_NUMBER; hash = (53 * hash) + getChildQueuesList().hashCode(); } if (getApplicationsCount() > 0) { hash = (37 * hash) + APPLICATIONS_FIELD_NUMBER; hash = (53 * hash) + getApplicationsList().hashCode(); } if (getAccessibleNodeLabelsCount() > 0) { hash = (37 * 
hash) + ACCESSIBLENODELABELS_FIELD_NUMBER; hash = (53 * hash) + getAccessibleNodeLabelsList().hashCode(); } if (hasDefaultNodeLabelExpression()) { hash = (37 * hash) + DEFAULTNODELABELEXPRESSION_FIELD_NUMBER; hash = (53 * hash) + getDefaultNodeLabelExpression().hashCode(); } if (hasQueueStatistics()) { hash = (37 * hash) + QUEUESTATISTICS_FIELD_NUMBER; hash = (53 * hash) + getQueueStatistics().hashCode(); } if (hasPreemptionDisabled()) { hash = (37 * hash) + PREEMPTIONDISABLED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getPreemptionDisabled()); } if (getQueueConfigurationsMapCount() > 0) { hash = (37 * hash) + QUEUECONFIGURATIONSMAP_FIELD_NUMBER; hash = (53 * hash) + getQueueConfigurationsMapList().hashCode(); } if (hasIntraQueuePreemptionDisabled()) { hash = (37 * hash) + INTRAQUEUEPREEMPTIONDISABLED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean( getIntraQueuePreemptionDisabled()); } if (hasWeight()) { hash = (37 * hash) + WEIGHT_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits( getWeight()); } if (hasQueuePath()) { hash = (37 * hash) + QUEUEPATH_FIELD_NUMBER; hash = (53 * hash) + getQueuePath().hashCode(); } if (hasMaxParallelApps()) { hash = (37 * hash) + MAXPARALLELAPPS_FIELD_NUMBER; hash = (53 * hash) + getMaxParallelApps(); } if (hasSchedulerType()) { hash = (37 * hash) + SCHEDULERTYPE_FIELD_NUMBER; hash = (53 * hash) + getSchedulerType().hashCode(); } if (hasMinResourceVCore()) { hash = (37 * hash) + MINRESOURCEVCORE_FIELD_NUMBER; hash = (53 * hash) + getMinResourceVCore(); } if (hasMinResourceMemory()) { hash = (37 * hash) + MINRESOURCEMEMORY_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getMinResourceMemory()); } if (hasMaxResourceVCore()) { hash = (37 * hash) + MAXRESOURCEVCORE_FIELD_NUMBER; hash = (53 * hash) + getMaxResourceVCore(); } if (hasMaxResourceMemory()) { hash = (37 * hash) + MAXRESOURCEMEMORY_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getMaxResourceMemory()); } if (hasReservedResourceVCore()) { hash = (37 * hash) + RESERVEDRESOURCEVCORE_FIELD_NUMBER; hash = (53 * hash) + getReservedResourceVCore(); } if (hasReservedResourceMemory()) { hash = (37 * hash) + RESERVEDRESOURCEMEMORY_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getReservedResourceMemory()); } if (hasSteadyFairShareVCore()) { hash = (37 * hash) + STEADYFAIRSHAREVCORE_FIELD_NUMBER; hash = (53 * hash) + getSteadyFairShareVCore(); } if (hasSteadyFairShareMemory()) { hash = (37 * hash) + STEADYFAIRSHAREMEMORY_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getSteadyFairShareMemory()); } if (hasSubClusterId()) { hash = (37 * hash) + SUBCLUSTERID_FIELD_NUMBER; hash = (53 * hash) + getSubClusterId().hashCode(); } if (hasMaxRunningApp()) { hash = (37 * hash) + MAXRUNNINGAPP_FIELD_NUMBER; hash = (53 * hash) + getMaxRunningApp(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom( java.nio.ByteBuffer data, 
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public 
static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.QueueInfoProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.QueueInfoProto) org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueInfoProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueInfoProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getChildQueuesFieldBuilder(); getApplicationsFieldBuilder(); getQueueStatisticsFieldBuilder(); getQueueConfigurationsMapFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; queueName_ = ""; capacity_ = 0F; maximumCapacity_ = 0F; currentCapacity_ = 0F; state_ = 1; if (childQueuesBuilder_ == null) { childQueues_ = java.util.Collections.emptyList(); } else { childQueues_ = null; childQueuesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000020); if (applicationsBuilder_ == null) { applications_ = java.util.Collections.emptyList(); } else { applications_ = null; applicationsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000040); accessibleNodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000080); defaultNodeLabelExpression_ = ""; queueStatistics_ = null; if (queueStatisticsBuilder_ != null) { queueStatisticsBuilder_.dispose(); queueStatisticsBuilder_ = null; } preemptionDisabled_ = false; if (queueConfigurationsMapBuilder_ == null) { queueConfigurationsMap_ = java.util.Collections.emptyList(); } else { queueConfigurationsMap_ = null; queueConfigurationsMapBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000800); intraQueuePreemptionDisabled_ = false; weight_ = 0F; queuePath_ = ""; maxParallelApps_ = 0; schedulerType_ = ""; minResourceVCore_ = 0; minResourceMemory_ = 0L; maxResourceVCore_ = 0; maxResourceMemory_ = 0L; reservedResourceVCore_ = 0; reservedResourceMemory_ = 0L; steadyFairShareVCore_ = 0; steadyFairShareMemory_ = 0L; subClusterId_ = ""; maxRunningApp_ = 0; 
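      /*
       * Illustrative note (editor-added; not protoc output): clear() resets
       * every field to its default and drops all bitField0_ "has" bits, so e.g.
       *
       *   QueueInfoProto.Builder b = QueueInfoProto.newBuilder().setCapacity(0.5f);
       *   b.clear();
       *   // now b.hasCapacity() == false and b.getCapacity() == 0F
       *
       * setCapacity(float) is the standard protoc setter; it is exercised by
       * mergeFrom(...) below.
       */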
return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueInfoProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto build() { org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto result = new org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto result) { if (childQueuesBuilder_ == null) { if (((bitField0_ & 0x00000020) != 0)) { childQueues_ = java.util.Collections.unmodifiableList(childQueues_); bitField0_ = (bitField0_ & ~0x00000020); } result.childQueues_ = childQueues_; } else { result.childQueues_ = childQueuesBuilder_.build(); } if (applicationsBuilder_ == null) { if (((bitField0_ & 0x00000040) != 0)) { applications_ = java.util.Collections.unmodifiableList(applications_); bitField0_ = (bitField0_ & ~0x00000040); } result.applications_ = applications_; } else { result.applications_ = applicationsBuilder_.build(); } if (((bitField0_ & 0x00000080) != 0)) { accessibleNodeLabels_ = accessibleNodeLabels_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000080); } result.accessibleNodeLabels_ = accessibleNodeLabels_; if (queueConfigurationsMapBuilder_ == null) { if (((bitField0_ & 0x00000800) != 0)) { queueConfigurationsMap_ = java.util.Collections.unmodifiableList(queueConfigurationsMap_); bitField0_ = (bitField0_ & ~0x00000800); } result.queueConfigurationsMap_ = queueConfigurationsMap_; } else { result.queueConfigurationsMap_ = queueConfigurationsMapBuilder_.build(); } } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.queueName_ = queueName_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.capacity_ = capacity_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.maximumCapacity_ = maximumCapacity_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.currentCapacity_ = currentCapacity_; to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.state_ = state_; to_bitField0_ |= 0x00000010; } if (((from_bitField0_ & 0x00000100) != 0)) { result.defaultNodeLabelExpression_ = defaultNodeLabelExpression_; to_bitField0_ |= 0x00000020; } if (((from_bitField0_ & 0x00000200) != 0)) { result.queueStatistics_ = queueStatisticsBuilder_ == null ? 
queueStatistics_ : queueStatisticsBuilder_.build(); to_bitField0_ |= 0x00000040; } if (((from_bitField0_ & 0x00000400) != 0)) { result.preemptionDisabled_ = preemptionDisabled_; to_bitField0_ |= 0x00000080; } if (((from_bitField0_ & 0x00001000) != 0)) { result.intraQueuePreemptionDisabled_ = intraQueuePreemptionDisabled_; to_bitField0_ |= 0x00000100; } if (((from_bitField0_ & 0x00002000) != 0)) { result.weight_ = weight_; to_bitField0_ |= 0x00000200; } if (((from_bitField0_ & 0x00004000) != 0)) { result.queuePath_ = queuePath_; to_bitField0_ |= 0x00000400; } if (((from_bitField0_ & 0x00008000) != 0)) { result.maxParallelApps_ = maxParallelApps_; to_bitField0_ |= 0x00000800; } if (((from_bitField0_ & 0x00010000) != 0)) { result.schedulerType_ = schedulerType_; to_bitField0_ |= 0x00001000; } if (((from_bitField0_ & 0x00020000) != 0)) { result.minResourceVCore_ = minResourceVCore_; to_bitField0_ |= 0x00002000; } if (((from_bitField0_ & 0x00040000) != 0)) { result.minResourceMemory_ = minResourceMemory_; to_bitField0_ |= 0x00004000; } if (((from_bitField0_ & 0x00080000) != 0)) { result.maxResourceVCore_ = maxResourceVCore_; to_bitField0_ |= 0x00008000; } if (((from_bitField0_ & 0x00100000) != 0)) { result.maxResourceMemory_ = maxResourceMemory_; to_bitField0_ |= 0x00010000; } if (((from_bitField0_ & 0x00200000) != 0)) { result.reservedResourceVCore_ = reservedResourceVCore_; to_bitField0_ |= 0x00020000; } if (((from_bitField0_ & 0x00400000) != 0)) { result.reservedResourceMemory_ = reservedResourceMemory_; to_bitField0_ |= 0x00040000; } if (((from_bitField0_ & 0x00800000) != 0)) { result.steadyFairShareVCore_ = steadyFairShareVCore_; to_bitField0_ |= 0x00080000; } if (((from_bitField0_ & 0x01000000) != 0)) { result.steadyFairShareMemory_ = steadyFairShareMemory_; to_bitField0_ |= 0x00100000; } if (((from_bitField0_ & 0x02000000) != 0)) { result.subClusterId_ = subClusterId_; to_bitField0_ |= 0x00200000; } if (((from_bitField0_ & 0x04000000) != 0)) { result.maxRunningApp_ = maxRunningApp_; to_bitField0_ |= 0x00400000; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto other) { if (other 
== org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance()) return this; if (other.hasQueueName()) { queueName_ = other.queueName_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasCapacity()) { setCapacity(other.getCapacity()); } if (other.hasMaximumCapacity()) { setMaximumCapacity(other.getMaximumCapacity()); } if (other.hasCurrentCapacity()) { setCurrentCapacity(other.getCurrentCapacity()); } if (other.hasState()) { setState(other.getState()); } if (childQueuesBuilder_ == null) { if (!other.childQueues_.isEmpty()) { if (childQueues_.isEmpty()) { childQueues_ = other.childQueues_; bitField0_ = (bitField0_ & ~0x00000020); } else { ensureChildQueuesIsMutable(); childQueues_.addAll(other.childQueues_); } onChanged(); } } else { if (!other.childQueues_.isEmpty()) { if (childQueuesBuilder_.isEmpty()) { childQueuesBuilder_.dispose(); childQueuesBuilder_ = null; childQueues_ = other.childQueues_; bitField0_ = (bitField0_ & ~0x00000020); childQueuesBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getChildQueuesFieldBuilder() : null; } else { childQueuesBuilder_.addAllMessages(other.childQueues_); } } } if (applicationsBuilder_ == null) { if (!other.applications_.isEmpty()) { if (applications_.isEmpty()) { applications_ = other.applications_; bitField0_ = (bitField0_ & ~0x00000040); } else { ensureApplicationsIsMutable(); applications_.addAll(other.applications_); } onChanged(); } } else { if (!other.applications_.isEmpty()) { if (applicationsBuilder_.isEmpty()) { applicationsBuilder_.dispose(); applicationsBuilder_ = null; applications_ = other.applications_; bitField0_ = (bitField0_ & ~0x00000040); applicationsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getApplicationsFieldBuilder() : null; } else { applicationsBuilder_.addAllMessages(other.applications_); } } } if (!other.accessibleNodeLabels_.isEmpty()) { if (accessibleNodeLabels_.isEmpty()) { accessibleNodeLabels_ = other.accessibleNodeLabels_; bitField0_ = (bitField0_ & ~0x00000080); } else { ensureAccessibleNodeLabelsIsMutable(); accessibleNodeLabels_.addAll(other.accessibleNodeLabels_); } onChanged(); } if (other.hasDefaultNodeLabelExpression()) { defaultNodeLabelExpression_ = other.defaultNodeLabelExpression_; bitField0_ |= 0x00000100; onChanged(); } if (other.hasQueueStatistics()) { mergeQueueStatistics(other.getQueueStatistics()); } if (other.hasPreemptionDisabled()) { setPreemptionDisabled(other.getPreemptionDisabled()); } if (queueConfigurationsMapBuilder_ == null) { if (!other.queueConfigurationsMap_.isEmpty()) { if (queueConfigurationsMap_.isEmpty()) { queueConfigurationsMap_ = other.queueConfigurationsMap_; bitField0_ = (bitField0_ & ~0x00000800); } else { ensureQueueConfigurationsMapIsMutable(); queueConfigurationsMap_.addAll(other.queueConfigurationsMap_); } onChanged(); } } else { if (!other.queueConfigurationsMap_.isEmpty()) { if (queueConfigurationsMapBuilder_.isEmpty()) { queueConfigurationsMapBuilder_.dispose(); queueConfigurationsMapBuilder_ = null; queueConfigurationsMap_ = other.queueConfigurationsMap_; bitField0_ = (bitField0_ & ~0x00000800); queueConfigurationsMapBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getQueueConfigurationsMapFieldBuilder() : null; } else { queueConfigurationsMapBuilder_.addAllMessages(other.queueConfigurationsMap_); } } } if (other.hasIntraQueuePreemptionDisabled()) { setIntraQueuePreemptionDisabled(other.getIntraQueuePreemptionDisabled()); } if (other.hasWeight()) { setWeight(other.getWeight()); } if (other.hasQueuePath()) { queuePath_ = other.queuePath_; bitField0_ |= 0x00004000; onChanged(); } if (other.hasMaxParallelApps()) { setMaxParallelApps(other.getMaxParallelApps()); } if (other.hasSchedulerType()) { schedulerType_ = other.schedulerType_; bitField0_ |= 0x00010000; onChanged(); } if (other.hasMinResourceVCore()) { setMinResourceVCore(other.getMinResourceVCore()); } if (other.hasMinResourceMemory()) { setMinResourceMemory(other.getMinResourceMemory()); } if (other.hasMaxResourceVCore()) { setMaxResourceVCore(other.getMaxResourceVCore()); } if (other.hasMaxResourceMemory()) { setMaxResourceMemory(other.getMaxResourceMemory()); } if (other.hasReservedResourceVCore()) { setReservedResourceVCore(other.getReservedResourceVCore()); } if (other.hasReservedResourceMemory()) { setReservedResourceMemory(other.getReservedResourceMemory()); } if (other.hasSteadyFairShareVCore()) { setSteadyFairShareVCore(other.getSteadyFairShareVCore()); } if (other.hasSteadyFairShareMemory()) { setSteadyFairShareMemory(other.getSteadyFairShareMemory()); } if (other.hasSubClusterId()) { subClusterId_ = other.subClusterId_; bitField0_ |= 0x02000000; onChanged(); } if (other.hasMaxRunningApp()) { setMaxRunningApp(other.getMaxRunningApp()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getChildQueuesCount(); i++) { if (!getChildQueues(i).isInitialized()) { return false; } } for (int i = 0; i < getApplicationsCount(); i++) { if (!getApplications(i).isInitialized()) { return false; } } for (int i = 0; i < getQueueConfigurationsMapCount(); i++) { if (!getQueueConfigurationsMap(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { queueName_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 21: { capacity_ = input.readFloat(); bitField0_ |= 0x00000002; break; } // case 21 case 29: { maximumCapacity_ = input.readFloat(); bitField0_ |= 0x00000004; break; } // case 29 case 37: { currentCapacity_ = input.readFloat(); bitField0_ |= 0x00000008; break; } // case 37 case 40: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(5, tmpRaw); } else { state_ = tmpRaw; bitField0_ |= 0x00000010; } break; } // case 40 case 50: { org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.PARSER, extensionRegistry); if (childQueuesBuilder_ == null) { ensureChildQueuesIsMutable(); childQueues_.add(m); } else { childQueuesBuilder_.addMessage(m); } break; } // case 50 case 58: { 
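              // Editor's note (illustrative, not part of the generated file): each case
              // label is the protobuf wire tag, computed as (field_number << 3) | wire_type.
              // For this field, applications is field 7 with wire type 2 (length-delimited
              // message), so (7 << 3) | 2 = 58. Likewise capacity (field 2, 32-bit float,
              // wire type 5) gives (2 << 3) | 5 = 21 above, and tag 0 means end of input.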
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.PARSER, extensionRegistry); if (applicationsBuilder_ == null) { ensureApplicationsIsMutable(); applications_.add(m); } else { applicationsBuilder_.addMessage(m); } break; } // case 58 case 66: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); ensureAccessibleNodeLabelsIsMutable(); accessibleNodeLabels_.add(bs); break; } // case 66 case 74: { defaultNodeLabelExpression_ = input.readBytes(); bitField0_ |= 0x00000100; break; } // case 74 case 82: { input.readMessage( getQueueStatisticsFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000200; break; } // case 82 case 88: { preemptionDisabled_ = input.readBool(); bitField0_ |= 0x00000400; break; } // case 88 case 98: { org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.PARSER, extensionRegistry); if (queueConfigurationsMapBuilder_ == null) { ensureQueueConfigurationsMapIsMutable(); queueConfigurationsMap_.add(m); } else { queueConfigurationsMapBuilder_.addMessage(m); } break; } // case 98 case 104: { intraQueuePreemptionDisabled_ = input.readBool(); bitField0_ |= 0x00001000; break; } // case 104 case 117: { weight_ = input.readFloat(); bitField0_ |= 0x00002000; break; } // case 117 case 122: { queuePath_ = input.readBytes(); bitField0_ |= 0x00004000; break; } // case 122 case 128: { maxParallelApps_ = input.readInt32(); bitField0_ |= 0x00008000; break; } // case 128 case 138: { schedulerType_ = input.readBytes(); bitField0_ |= 0x00010000; break; } // case 138 case 144: { minResourceVCore_ = input.readInt32(); bitField0_ |= 0x00020000; break; } // case 144 case 152: { minResourceMemory_ = input.readInt64(); bitField0_ |= 0x00040000; break; } // case 152 case 160: { maxResourceVCore_ = input.readInt32(); bitField0_ |= 0x00080000; break; } // case 160 case 168: { maxResourceMemory_ = input.readInt64(); bitField0_ |= 0x00100000; break; } // case 168 case 176: { reservedResourceVCore_ = input.readInt32(); bitField0_ |= 0x00200000; break; } // case 176 case 184: { reservedResourceMemory_ = input.readInt64(); bitField0_ |= 0x00400000; break; } // case 184 case 192: { steadyFairShareVCore_ = input.readInt32(); bitField0_ |= 0x00800000; break; } // case 192 case 200: { steadyFairShareMemory_ = input.readInt64(); bitField0_ |= 0x01000000; break; } // case 200 case 210: { subClusterId_ = input.readBytes(); bitField0_ |= 0x02000000; break; } // case 210 case 216: { maxRunningApp_ = input.readInt32(); bitField0_ |= 0x04000000; break; } // case 216 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object queueName_ = ""; /** * optional string queueName = 1; * @return Whether the queueName field is set. */ public boolean hasQueueName() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string queueName = 1; * @return The queueName. 
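     * <p>Editor's note (illustrative, not generated): the field is stored as either a
     * decoded {@code java.lang.String} or a raw {@code ByteString}; this getter decodes
     * UTF-8 lazily and, when the bytes are valid UTF-8, caches the decoded String back
     * into {@code queueName_}, so repeated calls pay the decode cost at most once.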
*/ public java.lang.String getQueueName() { java.lang.Object ref = queueName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { queueName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string queueName = 1; * @return The bytes for queueName. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getQueueNameBytes() { java.lang.Object ref = queueName_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); queueName_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string queueName = 1; * @param value The queueName to set. * @return This builder for chaining. */ public Builder setQueueName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } queueName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional string queueName = 1; * @return This builder for chaining. */ public Builder clearQueueName() { queueName_ = getDefaultInstance().getQueueName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * optional string queueName = 1; * @param value The bytes for queueName to set. * @return This builder for chaining. */ public Builder setQueueNameBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } queueName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private float capacity_ ; /** * optional float capacity = 2; * @return Whether the capacity field is set. */ @java.lang.Override public boolean hasCapacity() { return ((bitField0_ & 0x00000002) != 0); } /** * optional float capacity = 2; * @return The capacity. */ @java.lang.Override public float getCapacity() { return capacity_; } /** * optional float capacity = 2; * @param value The capacity to set. * @return This builder for chaining. */ public Builder setCapacity(float value) { capacity_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional float capacity = 2; * @return This builder for chaining. */ public Builder clearCapacity() { bitField0_ = (bitField0_ & ~0x00000002); capacity_ = 0F; onChanged(); return this; } private float maximumCapacity_ ; /** * optional float maximumCapacity = 3; * @return Whether the maximumCapacity field is set. */ @java.lang.Override public boolean hasMaximumCapacity() { return ((bitField0_ & 0x00000004) != 0); } /** * optional float maximumCapacity = 3; * @return The maximumCapacity. */ @java.lang.Override public float getMaximumCapacity() { return maximumCapacity_; } /** * optional float maximumCapacity = 3; * @param value The maximumCapacity to set. * @return This builder for chaining. */ public Builder setMaximumCapacity(float value) { maximumCapacity_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional float maximumCapacity = 3; * @return This builder for chaining. */ public Builder clearMaximumCapacity() { bitField0_ = (bitField0_ & ~0x00000004); maximumCapacity_ = 0F; onChanged(); return this; } private float currentCapacity_ ; /** * optional float currentCapacity = 4; * @return Whether the currentCapacity field is set. 
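     * <p>Editor's note (illustrative): presence of each optional scalar is tracked by a
     * dedicated bit in {@code bitField0_} (0x00000008 for this field), which is how a
     * reader can distinguish an unset field from one explicitly set to 0.0f, e.g.
     * <pre>
     * float c = info.hasCurrentCapacity() ? info.getCurrentCapacity() : -1f; // -1f is a made-up sentinel
     * </pre>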
*/ @java.lang.Override public boolean hasCurrentCapacity() { return ((bitField0_ & 0x00000008) != 0); } /** * optional float currentCapacity = 4; * @return The currentCapacity. */ @java.lang.Override public float getCurrentCapacity() { return currentCapacity_; } /** * optional float currentCapacity = 4; * @param value The currentCapacity to set. * @return This builder for chaining. */ public Builder setCurrentCapacity(float value) { currentCapacity_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional float currentCapacity = 4; * @return This builder for chaining. */ public Builder clearCurrentCapacity() { bitField0_ = (bitField0_ & ~0x00000008); currentCapacity_ = 0F; onChanged(); return this; } private int state_ = 1; /** * optional .hadoop.yarn.QueueStateProto state = 5; * @return Whether the state field is set. */ @java.lang.Override public boolean hasState() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.QueueStateProto state = 5; * @return The state. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto getState() { org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto.forNumber(state_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto.Q_STOPPED : result; } /** * optional .hadoop.yarn.QueueStateProto state = 5; * @param value The state to set. * @return This builder for chaining. */ public Builder setState(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000010; state_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.QueueStateProto state = 5; * @return This builder for chaining. 
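     * <p>Editor's note (illustrative): the builder keeps the raw enum number in an int;
     * {@code clearState()} resets it to 1 ({@code Q_STOPPED}), and {@code getState()}
     * maps numbers back via {@code forNumber}, falling back to {@code Q_STOPPED} for
     * values unknown to this generated version. E.g.
     * {@code builder.setState(QueueStateProto.Q_RUNNING)} stores the number 2
     * (assuming the standard yarn_protos.proto numbering).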
*/ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000010); state_ = 1; onChanged(); return this; } private java.util.List childQueues_ = java.util.Collections.emptyList(); private void ensureChildQueuesIsMutable() { if (!((bitField0_ & 0x00000020) != 0)) { childQueues_ = new java.util.ArrayList(childQueues_); bitField0_ |= 0x00000020; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder> childQueuesBuilder_; /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public java.util.List getChildQueuesList() { if (childQueuesBuilder_ == null) { return java.util.Collections.unmodifiableList(childQueues_); } else { return childQueuesBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public int getChildQueuesCount() { if (childQueuesBuilder_ == null) { return childQueues_.size(); } else { return childQueuesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getChildQueues(int index) { if (childQueuesBuilder_ == null) { return childQueues_.get(index); } else { return childQueuesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public Builder setChildQueues( int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto value) { if (childQueuesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureChildQueuesIsMutable(); childQueues_.set(index, value); onChanged(); } else { childQueuesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public Builder setChildQueues( int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder builderForValue) { if (childQueuesBuilder_ == null) { ensureChildQueuesIsMutable(); childQueues_.set(index, builderForValue.build()); onChanged(); } else { childQueuesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public Builder addChildQueues(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto value) { if (childQueuesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureChildQueuesIsMutable(); childQueues_.add(value); onChanged(); } else { childQueuesBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public Builder addChildQueues( int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto value) { if (childQueuesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureChildQueuesIsMutable(); childQueues_.add(index, value); onChanged(); } else { childQueuesBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public Builder addChildQueues( org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder builderForValue) { if (childQueuesBuilder_ == null) { ensureChildQueuesIsMutable(); childQueues_.add(builderForValue.build()); onChanged(); } else { childQueuesBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public Builder addChildQueues( int index, 
org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder builderForValue) { if (childQueuesBuilder_ == null) { ensureChildQueuesIsMutable(); childQueues_.add(index, builderForValue.build()); onChanged(); } else { childQueuesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public Builder addAllChildQueues( java.lang.Iterable values) { if (childQueuesBuilder_ == null) { ensureChildQueuesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, childQueues_); onChanged(); } else { childQueuesBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public Builder clearChildQueues() { if (childQueuesBuilder_ == null) { childQueues_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000020); onChanged(); } else { childQueuesBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public Builder removeChildQueues(int index) { if (childQueuesBuilder_ == null) { ensureChildQueuesIsMutable(); childQueues_.remove(index); onChanged(); } else { childQueuesBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder getChildQueuesBuilder( int index) { return getChildQueuesFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder getChildQueuesOrBuilder( int index) { if (childQueuesBuilder_ == null) { return childQueues_.get(index); } else { return childQueuesBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public java.util.List getChildQueuesOrBuilderList() { if (childQueuesBuilder_ != null) { return childQueuesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(childQueues_); } } /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder addChildQueuesBuilder() { return getChildQueuesFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder addChildQueuesBuilder( int index) { return getChildQueuesFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.QueueInfoProto childQueues = 6; */ public java.util.List getChildQueuesBuilderList() { return getChildQueuesFieldBuilder().getBuilderList(); } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder> getChildQueuesFieldBuilder() { if (childQueuesBuilder_ == null) { childQueuesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder>( childQueues_, 
((bitField0_ & 0x00000020) != 0), getParentForChildren(), isClean()); childQueues_ = null; } return childQueuesBuilder_; } private java.util.List applications_ = java.util.Collections.emptyList(); private void ensureApplicationsIsMutable() { if (!((bitField0_ & 0x00000040) != 0)) { applications_ = new java.util.ArrayList(applications_); bitField0_ |= 0x00000040; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder> applicationsBuilder_; /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public java.util.List getApplicationsList() { if (applicationsBuilder_ == null) { return java.util.Collections.unmodifiableList(applications_); } else { return applicationsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public int getApplicationsCount() { if (applicationsBuilder_ == null) { return applications_.size(); } else { return applicationsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplications(int index) { if (applicationsBuilder_ == null) { return applications_.get(index); } else { return applicationsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public Builder setApplications( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto value) { if (applicationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationsIsMutable(); applications_.set(index, value); onChanged(); } else { applicationsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public Builder setApplications( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder builderForValue) { if (applicationsBuilder_ == null) { ensureApplicationsIsMutable(); applications_.set(index, builderForValue.build()); onChanged(); } else { applicationsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public Builder addApplications(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto value) { if (applicationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationsIsMutable(); applications_.add(value); onChanged(); } else { applicationsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public Builder addApplications( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto value) { if (applicationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApplicationsIsMutable(); applications_.add(index, value); onChanged(); } else { applicationsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public Builder addApplications( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder builderForValue) { if (applicationsBuilder_ == null) { ensureApplicationsIsMutable(); applications_.add(builderForValue.build()); onChanged(); 
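        // Editor's note (illustrative): while applicationsBuilder_ is null the repeated
        // field lives in a plain java.util.List (this branch); the first call to
        // getApplicationsFieldBuilder() migrates the list into a RepeatedFieldBuilderV3,
        // after which the else branch below delegates every mutation to that builder.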
} else { applicationsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public Builder addApplications( int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder builderForValue) { if (applicationsBuilder_ == null) { ensureApplicationsIsMutable(); applications_.add(index, builderForValue.build()); onChanged(); } else { applicationsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public Builder addAllApplications( java.lang.Iterable values) { if (applicationsBuilder_ == null) { ensureApplicationsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, applications_); onChanged(); } else { applicationsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public Builder clearApplications() { if (applicationsBuilder_ == null) { applications_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000040); onChanged(); } else { applicationsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public Builder removeApplications(int index) { if (applicationsBuilder_ == null) { ensureApplicationsIsMutable(); applications_.remove(index); onChanged(); } else { applicationsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder getApplicationsBuilder( int index) { return getApplicationsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder getApplicationsOrBuilder( int index) { if (applicationsBuilder_ == null) { return applications_.get(index); } else { return applicationsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public java.util.List getApplicationsOrBuilderList() { if (applicationsBuilder_ != null) { return applicationsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(applications_); } } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder addApplicationsBuilder() { return getApplicationsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder addApplicationsBuilder( int index) { return getApplicationsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ApplicationReportProto applications = 7; */ public java.util.List getApplicationsBuilderList() { return getApplicationsFieldBuilder().getBuilderList(); } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder> getApplicationsFieldBuilder() { if (applicationsBuilder_ == null) { applicationsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder>( applications_, ((bitField0_ & 0x00000040) != 0), getParentForChildren(), isClean()); applications_ = null; } return applicationsBuilder_; } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList accessibleNodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; private void ensureAccessibleNodeLabelsIsMutable() { if (!((bitField0_ & 0x00000080) != 0)) { accessibleNodeLabels_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(accessibleNodeLabels_); bitField0_ |= 0x00000080; } } /** * repeated string accessibleNodeLabels = 8; * @return A list containing the accessibleNodeLabels. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getAccessibleNodeLabelsList() { return accessibleNodeLabels_.getUnmodifiableView(); } /** * repeated string accessibleNodeLabels = 8; * @return The count of accessibleNodeLabels. */ public int getAccessibleNodeLabelsCount() { return accessibleNodeLabels_.size(); } /** * repeated string accessibleNodeLabels = 8; * @param index The index of the element to return. * @return The accessibleNodeLabels at the given index. */ public java.lang.String getAccessibleNodeLabels(int index) { return accessibleNodeLabels_.get(index); } /** * repeated string accessibleNodeLabels = 8; * @param index The index of the value to return. * @return The bytes of the accessibleNodeLabels at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getAccessibleNodeLabelsBytes(int index) { return accessibleNodeLabels_.getByteString(index); } /** * repeated string accessibleNodeLabels = 8; * @param index The index to set the value at. * @param value The accessibleNodeLabels to set. * @return This builder for chaining. */ public Builder setAccessibleNodeLabels( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureAccessibleNodeLabelsIsMutable(); accessibleNodeLabels_.set(index, value); onChanged(); return this; } /** * repeated string accessibleNodeLabels = 8; * @param value The accessibleNodeLabels to add. * @return This builder for chaining. */ public Builder addAccessibleNodeLabels( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureAccessibleNodeLabelsIsMutable(); accessibleNodeLabels_.add(value); onChanged(); return this; } /** * repeated string accessibleNodeLabels = 8; * @param values The accessibleNodeLabels to add. * @return This builder for chaining. */ public Builder addAllAccessibleNodeLabels( java.lang.Iterable values) { ensureAccessibleNodeLabelsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, accessibleNodeLabels_); onChanged(); return this; } /** * repeated string accessibleNodeLabels = 8; * @return This builder for chaining. 
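     * <p>Editor's note (illustrative): labels accumulate until cleared, e.g.
     * <pre>
     * builder.addAccessibleNodeLabels("gpu")   // hypothetical label names
     *        .addAccessibleNodeLabels("ssd")
     *        .clearAccessibleNodeLabels();     // list is empty again
     * </pre>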
*/ public Builder clearAccessibleNodeLabels() { accessibleNodeLabels_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000080); onChanged(); return this; } /** * repeated string accessibleNodeLabels = 8; * @param value The bytes of the accessibleNodeLabels to add. * @return This builder for chaining. */ public Builder addAccessibleNodeLabelsBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureAccessibleNodeLabelsIsMutable(); accessibleNodeLabels_.add(value); onChanged(); return this; } private java.lang.Object defaultNodeLabelExpression_ = ""; /** * optional string defaultNodeLabelExpression = 9; * @return Whether the defaultNodeLabelExpression field is set. */ public boolean hasDefaultNodeLabelExpression() { return ((bitField0_ & 0x00000100) != 0); } /** * optional string defaultNodeLabelExpression = 9; * @return The defaultNodeLabelExpression. */ public java.lang.String getDefaultNodeLabelExpression() { java.lang.Object ref = defaultNodeLabelExpression_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { defaultNodeLabelExpression_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string defaultNodeLabelExpression = 9; * @return The bytes for defaultNodeLabelExpression. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getDefaultNodeLabelExpressionBytes() { java.lang.Object ref = defaultNodeLabelExpression_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); defaultNodeLabelExpression_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string defaultNodeLabelExpression = 9; * @param value The defaultNodeLabelExpression to set. * @return This builder for chaining. */ public Builder setDefaultNodeLabelExpression( java.lang.String value) { if (value == null) { throw new NullPointerException(); } defaultNodeLabelExpression_ = value; bitField0_ |= 0x00000100; onChanged(); return this; } /** * optional string defaultNodeLabelExpression = 9; * @return This builder for chaining. */ public Builder clearDefaultNodeLabelExpression() { defaultNodeLabelExpression_ = getDefaultInstance().getDefaultNodeLabelExpression(); bitField0_ = (bitField0_ & ~0x00000100); onChanged(); return this; } /** * optional string defaultNodeLabelExpression = 9; * @param value The bytes for defaultNodeLabelExpression to set. * @return This builder for chaining. 
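     * <p>Editor's note (illustrative): unlike the String overload, this bytes setter
     * stores the ByteString as-is; in proto2-style generated code it is not validated
     * as UTF-8 at set time, so malformed bytes only surface when the string getter
     * decodes them.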
*/ public Builder setDefaultNodeLabelExpressionBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } defaultNodeLabelExpression_ = value; bitField0_ |= 0x00000100; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto queueStatistics_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProtoOrBuilder> queueStatisticsBuilder_; /** * optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10; * @return Whether the queueStatistics field is set. */ public boolean hasQueueStatistics() { return ((bitField0_ & 0x00000200) != 0); } /** * optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10; * @return The queueStatistics. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto getQueueStatistics() { if (queueStatisticsBuilder_ == null) { return queueStatistics_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.getDefaultInstance() : queueStatistics_; } else { return queueStatisticsBuilder_.getMessage(); } } /** * optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10; */ public Builder setQueueStatistics(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto value) { if (queueStatisticsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } queueStatistics_ = value; } else { queueStatisticsBuilder_.setMessage(value); } bitField0_ |= 0x00000200; onChanged(); return this; } /** * optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10; */ public Builder setQueueStatistics( org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.Builder builderForValue) { if (queueStatisticsBuilder_ == null) { queueStatistics_ = builderForValue.build(); } else { queueStatisticsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000200; onChanged(); return this; } /** * optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10; */ public Builder mergeQueueStatistics(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto value) { if (queueStatisticsBuilder_ == null) { if (((bitField0_ & 0x00000200) != 0) && queueStatistics_ != null && queueStatistics_ != org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.getDefaultInstance()) { getQueueStatisticsBuilder().mergeFrom(value); } else { queueStatistics_ = value; } } else { queueStatisticsBuilder_.mergeFrom(value); } bitField0_ |= 0x00000200; onChanged(); return this; } /** * optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10; */ public Builder clearQueueStatistics() { bitField0_ = (bitField0_ & ~0x00000200); queueStatistics_ = null; if (queueStatisticsBuilder_ != null) { queueStatisticsBuilder_.dispose(); queueStatisticsBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.Builder getQueueStatisticsBuilder() { bitField0_ |= 0x00000200; onChanged(); return getQueueStatisticsFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10; */ public 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProtoOrBuilder getQueueStatisticsOrBuilder() { if (queueStatisticsBuilder_ != null) { return queueStatisticsBuilder_.getMessageOrBuilder(); } else { return queueStatistics_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.getDefaultInstance() : queueStatistics_; } } /** * optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProtoOrBuilder> getQueueStatisticsFieldBuilder() { if (queueStatisticsBuilder_ == null) { queueStatisticsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProtoOrBuilder>( getQueueStatistics(), getParentForChildren(), isClean()); queueStatistics_ = null; } return queueStatisticsBuilder_; } private boolean preemptionDisabled_ ; /** * optional bool preemptionDisabled = 11; * @return Whether the preemptionDisabled field is set. */ @java.lang.Override public boolean hasPreemptionDisabled() { return ((bitField0_ & 0x00000400) != 0); } /** * optional bool preemptionDisabled = 11; * @return The preemptionDisabled. */ @java.lang.Override public boolean getPreemptionDisabled() { return preemptionDisabled_; } /** * optional bool preemptionDisabled = 11; * @param value The preemptionDisabled to set. * @return This builder for chaining. */ public Builder setPreemptionDisabled(boolean value) { preemptionDisabled_ = value; bitField0_ |= 0x00000400; onChanged(); return this; } /** * optional bool preemptionDisabled = 11; * @return This builder for chaining. 
*/ public Builder clearPreemptionDisabled() { bitField0_ = (bitField0_ & ~0x00000400); preemptionDisabled_ = false; onChanged(); return this; } private java.util.List queueConfigurationsMap_ = java.util.Collections.emptyList(); private void ensureQueueConfigurationsMapIsMutable() { if (!((bitField0_ & 0x00000800) != 0)) { queueConfigurationsMap_ = new java.util.ArrayList(queueConfigurationsMap_); bitField0_ |= 0x00000800; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder> queueConfigurationsMapBuilder_; /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public java.util.List getQueueConfigurationsMapList() { if (queueConfigurationsMapBuilder_ == null) { return java.util.Collections.unmodifiableList(queueConfigurationsMap_); } else { return queueConfigurationsMapBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public int getQueueConfigurationsMapCount() { if (queueConfigurationsMapBuilder_ == null) { return queueConfigurationsMap_.size(); } else { return queueConfigurationsMapBuilder_.getCount(); } } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto getQueueConfigurationsMap(int index) { if (queueConfigurationsMapBuilder_ == null) { return queueConfigurationsMap_.get(index); } else { return queueConfigurationsMapBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public Builder setQueueConfigurationsMap( int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto value) { if (queueConfigurationsMapBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureQueueConfigurationsMapIsMutable(); queueConfigurationsMap_.set(index, value); onChanged(); } else { queueConfigurationsMapBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public Builder setQueueConfigurationsMap( int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder builderForValue) { if (queueConfigurationsMapBuilder_ == null) { ensureQueueConfigurationsMapIsMutable(); queueConfigurationsMap_.set(index, builderForValue.build()); onChanged(); } else { queueConfigurationsMapBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public Builder addQueueConfigurationsMap(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto value) { if (queueConfigurationsMapBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureQueueConfigurationsMapIsMutable(); queueConfigurationsMap_.add(value); onChanged(); } else { queueConfigurationsMapBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public Builder addQueueConfigurationsMap( int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto value) { if (queueConfigurationsMapBuilder_ == null) { if (value 
== null) { throw new NullPointerException(); } ensureQueueConfigurationsMapIsMutable(); queueConfigurationsMap_.add(index, value); onChanged(); } else { queueConfigurationsMapBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public Builder addQueueConfigurationsMap( org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder builderForValue) { if (queueConfigurationsMapBuilder_ == null) { ensureQueueConfigurationsMapIsMutable(); queueConfigurationsMap_.add(builderForValue.build()); onChanged(); } else { queueConfigurationsMapBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public Builder addQueueConfigurationsMap( int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder builderForValue) { if (queueConfigurationsMapBuilder_ == null) { ensureQueueConfigurationsMapIsMutable(); queueConfigurationsMap_.add(index, builderForValue.build()); onChanged(); } else { queueConfigurationsMapBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public Builder addAllQueueConfigurationsMap( java.lang.Iterable values) { if (queueConfigurationsMapBuilder_ == null) { ensureQueueConfigurationsMapIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, queueConfigurationsMap_); onChanged(); } else { queueConfigurationsMapBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public Builder clearQueueConfigurationsMap() { if (queueConfigurationsMapBuilder_ == null) { queueConfigurationsMap_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000800); onChanged(); } else { queueConfigurationsMapBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public Builder removeQueueConfigurationsMap(int index) { if (queueConfigurationsMapBuilder_ == null) { ensureQueueConfigurationsMapIsMutable(); queueConfigurationsMap_.remove(index); onChanged(); } else { queueConfigurationsMapBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder getQueueConfigurationsMapBuilder( int index) { return getQueueConfigurationsMapFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder getQueueConfigurationsMapOrBuilder( int index) { if (queueConfigurationsMapBuilder_ == null) { return queueConfigurationsMap_.get(index); } else { return queueConfigurationsMapBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public java.util.List getQueueConfigurationsMapOrBuilderList() { if (queueConfigurationsMapBuilder_ != null) { return queueConfigurationsMapBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(queueConfigurationsMap_); } } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder addQueueConfigurationsMapBuilder() { return getQueueConfigurationsMapFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder addQueueConfigurationsMapBuilder( int index) { return getQueueConfigurationsMapFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12; */ public java.util.List getQueueConfigurationsMapBuilderList() { return getQueueConfigurationsMapFieldBuilder().getBuilderList(); } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder> getQueueConfigurationsMapFieldBuilder() { if (queueConfigurationsMapBuilder_ == null) { queueConfigurationsMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder>( queueConfigurationsMap_, ((bitField0_ & 0x00000800) != 0), getParentForChildren(), isClean()); queueConfigurationsMap_ = null; } return queueConfigurationsMapBuilder_; } private boolean intraQueuePreemptionDisabled_ ; /** * optional bool intraQueuePreemptionDisabled = 13; * @return Whether the intraQueuePreemptionDisabled field is set. */ @java.lang.Override public boolean hasIntraQueuePreemptionDisabled() { return ((bitField0_ & 0x00001000) != 0); } /** * optional bool intraQueuePreemptionDisabled = 13; * @return The intraQueuePreemptionDisabled. */ @java.lang.Override public boolean getIntraQueuePreemptionDisabled() { return intraQueuePreemptionDisabled_; } /** * optional bool intraQueuePreemptionDisabled = 13; * @param value The intraQueuePreemptionDisabled to set. * @return This builder for chaining. */ public Builder setIntraQueuePreemptionDisabled(boolean value) { intraQueuePreemptionDisabled_ = value; bitField0_ |= 0x00001000; onChanged(); return this; } /** * optional bool intraQueuePreemptionDisabled = 13; * @return This builder for chaining. */ public Builder clearIntraQueuePreemptionDisabled() { bitField0_ = (bitField0_ & ~0x00001000); intraQueuePreemptionDisabled_ = false; onChanged(); return this; } private float weight_ ; /** * optional float weight = 14; * @return Whether the weight field is set. */ @java.lang.Override public boolean hasWeight() { return ((bitField0_ & 0x00002000) != 0); } /** * optional float weight = 14; * @return The weight. */ @java.lang.Override public float getWeight() { return weight_; } /** * optional float weight = 14; * @param value The weight to set. * @return This builder for chaining. */ public Builder setWeight(float value) { weight_ = value; bitField0_ |= 0x00002000; onChanged(); return this; } /** * optional float weight = 14; * @return This builder for chaining. 
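     * <p>Editor's note (illustrative): clearing first drops the presence bit and then
     * resets the value, so
     * <pre>
     * builder.setWeight(2.0f).clearWeight();   // hasWeight() == false, getWeight() == 0F
     * </pre>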
*/ public Builder clearWeight() { bitField0_ = (bitField0_ & ~0x00002000); weight_ = 0F; onChanged(); return this; } private java.lang.Object queuePath_ = ""; /** * optional string queuePath = 15; * @return Whether the queuePath field is set. */ public boolean hasQueuePath() { return ((bitField0_ & 0x00004000) != 0); } /** * optional string queuePath = 15; * @return The queuePath. */ public java.lang.String getQueuePath() { java.lang.Object ref = queuePath_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { queuePath_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string queuePath = 15; * @return The bytes for queuePath. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getQueuePathBytes() { java.lang.Object ref = queuePath_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); queuePath_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string queuePath = 15; * @param value The queuePath to set. * @return This builder for chaining. */ public Builder setQueuePath( java.lang.String value) { if (value == null) { throw new NullPointerException(); } queuePath_ = value; bitField0_ |= 0x00004000; onChanged(); return this; } /** * optional string queuePath = 15; * @return This builder for chaining. */ public Builder clearQueuePath() { queuePath_ = getDefaultInstance().getQueuePath(); bitField0_ = (bitField0_ & ~0x00004000); onChanged(); return this; } /** * optional string queuePath = 15; * @param value The bytes for queuePath to set. * @return This builder for chaining. */ public Builder setQueuePathBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } queuePath_ = value; bitField0_ |= 0x00004000; onChanged(); return this; } private int maxParallelApps_ ; /** * optional int32 maxParallelApps = 16; * @return Whether the maxParallelApps field is set. */ @java.lang.Override public boolean hasMaxParallelApps() { return ((bitField0_ & 0x00008000) != 0); } /** * optional int32 maxParallelApps = 16; * @return The maxParallelApps. */ @java.lang.Override public int getMaxParallelApps() { return maxParallelApps_; } /** * optional int32 maxParallelApps = 16; * @param value The maxParallelApps to set. * @return This builder for chaining. */ public Builder setMaxParallelApps(int value) { maxParallelApps_ = value; bitField0_ |= 0x00008000; onChanged(); return this; } /** * optional int32 maxParallelApps = 16; * @return This builder for chaining. */ public Builder clearMaxParallelApps() { bitField0_ = (bitField0_ & ~0x00008000); maxParallelApps_ = 0; onChanged(); return this; } private java.lang.Object schedulerType_ = ""; /** * optional string schedulerType = 17; * @return Whether the schedulerType field is set. */ public boolean hasSchedulerType() { return ((bitField0_ & 0x00010000) != 0); } /** * optional string schedulerType = 17; * @return The schedulerType. 
*/ public java.lang.String getSchedulerType() { java.lang.Object ref = schedulerType_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { schedulerType_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string schedulerType = 17; * @return The bytes for schedulerType. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getSchedulerTypeBytes() { java.lang.Object ref = schedulerType_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); schedulerType_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string schedulerType = 17; * @param value The schedulerType to set. * @return This builder for chaining. */ public Builder setSchedulerType( java.lang.String value) { if (value == null) { throw new NullPointerException(); } schedulerType_ = value; bitField0_ |= 0x00010000; onChanged(); return this; } /** * optional string schedulerType = 17; * @return This builder for chaining. */ public Builder clearSchedulerType() { schedulerType_ = getDefaultInstance().getSchedulerType(); bitField0_ = (bitField0_ & ~0x00010000); onChanged(); return this; } /** * optional string schedulerType = 17; * @param value The bytes for schedulerType to set. * @return This builder for chaining. */ public Builder setSchedulerTypeBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } schedulerType_ = value; bitField0_ |= 0x00010000; onChanged(); return this; } private int minResourceVCore_ ; /** * optional int32 minResourceVCore = 18; * @return Whether the minResourceVCore field is set. */ @java.lang.Override public boolean hasMinResourceVCore() { return ((bitField0_ & 0x00020000) != 0); } /** * optional int32 minResourceVCore = 18; * @return The minResourceVCore. */ @java.lang.Override public int getMinResourceVCore() { return minResourceVCore_; } /** * optional int32 minResourceVCore = 18; * @param value The minResourceVCore to set. * @return This builder for chaining. */ public Builder setMinResourceVCore(int value) { minResourceVCore_ = value; bitField0_ |= 0x00020000; onChanged(); return this; } /** * optional int32 minResourceVCore = 18; * @return This builder for chaining. */ public Builder clearMinResourceVCore() { bitField0_ = (bitField0_ & ~0x00020000); minResourceVCore_ = 0; onChanged(); return this; } private long minResourceMemory_ ; /** * optional int64 minResourceMemory = 19; * @return Whether the minResourceMemory field is set. */ @java.lang.Override public boolean hasMinResourceMemory() { return ((bitField0_ & 0x00040000) != 0); } /** * optional int64 minResourceMemory = 19; * @return The minResourceMemory. */ @java.lang.Override public long getMinResourceMemory() { return minResourceMemory_; } /** * optional int64 minResourceMemory = 19; * @param value The minResourceMemory to set. * @return This builder for chaining. */ public Builder setMinResourceMemory(long value) { minResourceMemory_ = value; bitField0_ |= 0x00040000; onChanged(); return this; } /** * optional int64 minResourceMemory = 19; * @return This builder for chaining. 
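     * <p>Editor's note (illustrative): the min/max/reserved resource fields are plain
     * chained scalar setters, e.g.
     * <pre>
     * builder.setMinResourceVCore(1)
     *        .setMinResourceMemory(1024L);   // made-up values; YARN memory is conventionally in MB
     * </pre>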
*/ public Builder clearMinResourceMemory() { bitField0_ = (bitField0_ & ~0x00040000); minResourceMemory_ = 0L; onChanged(); return this; } private int maxResourceVCore_ ; /** * optional int32 maxResourceVCore = 20; * @return Whether the maxResourceVCore field is set. */ @java.lang.Override public boolean hasMaxResourceVCore() { return ((bitField0_ & 0x00080000) != 0); } /** * optional int32 maxResourceVCore = 20; * @return The maxResourceVCore. */ @java.lang.Override public int getMaxResourceVCore() { return maxResourceVCore_; } /** * optional int32 maxResourceVCore = 20; * @param value The maxResourceVCore to set. * @return This builder for chaining. */ public Builder setMaxResourceVCore(int value) { maxResourceVCore_ = value; bitField0_ |= 0x00080000; onChanged(); return this; } /** * optional int32 maxResourceVCore = 20; * @return This builder for chaining. */ public Builder clearMaxResourceVCore() { bitField0_ = (bitField0_ & ~0x00080000); maxResourceVCore_ = 0; onChanged(); return this; } private long maxResourceMemory_ ; /** * optional int64 maxResourceMemory = 21; * @return Whether the maxResourceMemory field is set. */ @java.lang.Override public boolean hasMaxResourceMemory() { return ((bitField0_ & 0x00100000) != 0); } /** * optional int64 maxResourceMemory = 21; * @return The maxResourceMemory. */ @java.lang.Override public long getMaxResourceMemory() { return maxResourceMemory_; } /** * optional int64 maxResourceMemory = 21; * @param value The maxResourceMemory to set. * @return This builder for chaining. */ public Builder setMaxResourceMemory(long value) { maxResourceMemory_ = value; bitField0_ |= 0x00100000; onChanged(); return this; } /** * optional int64 maxResourceMemory = 21; * @return This builder for chaining. */ public Builder clearMaxResourceMemory() { bitField0_ = (bitField0_ & ~0x00100000); maxResourceMemory_ = 0L; onChanged(); return this; } private int reservedResourceVCore_ ; /** * optional int32 reservedResourceVCore = 22; * @return Whether the reservedResourceVCore field is set. */ @java.lang.Override public boolean hasReservedResourceVCore() { return ((bitField0_ & 0x00200000) != 0); } /** * optional int32 reservedResourceVCore = 22; * @return The reservedResourceVCore. */ @java.lang.Override public int getReservedResourceVCore() { return reservedResourceVCore_; } /** * optional int32 reservedResourceVCore = 22; * @param value The reservedResourceVCore to set. * @return This builder for chaining. */ public Builder setReservedResourceVCore(int value) { reservedResourceVCore_ = value; bitField0_ |= 0x00200000; onChanged(); return this; } /** * optional int32 reservedResourceVCore = 22; * @return This builder for chaining. */ public Builder clearReservedResourceVCore() { bitField0_ = (bitField0_ & ~0x00200000); reservedResourceVCore_ = 0; onChanged(); return this; } private long reservedResourceMemory_ ; /** * optional int64 reservedResourceMemory = 23; * @return Whether the reservedResourceMemory field is set. */ @java.lang.Override public boolean hasReservedResourceMemory() { return ((bitField0_ & 0x00400000) != 0); } /** * optional int64 reservedResourceMemory = 23; * @return The reservedResourceMemory. */ @java.lang.Override public long getReservedResourceMemory() { return reservedResourceMemory_; } /** * optional int64 reservedResourceMemory = 23; * @param value The reservedResourceMemory to set. * @return This builder for chaining. 
private int steadyFairShareVCore_;
/** optional int32 steadyFairShareVCore = 24; @return Whether the steadyFairShareVCore field is set. */
@java.lang.Override
public boolean hasSteadyFairShareVCore() { return ((bitField0_ & 0x00800000) != 0); }
/** optional int32 steadyFairShareVCore = 24; @return The steadyFairShareVCore. */
@java.lang.Override
public int getSteadyFairShareVCore() { return steadyFairShareVCore_; }
/** optional int32 steadyFairShareVCore = 24; @param value The steadyFairShareVCore to set. @return This builder for chaining. */
public Builder setSteadyFairShareVCore(int value) {
  steadyFairShareVCore_ = value; bitField0_ |= 0x00800000; onChanged(); return this;
}
/** optional int32 steadyFairShareVCore = 24; @return This builder for chaining. */
public Builder clearSteadyFairShareVCore() {
  bitField0_ = (bitField0_ & ~0x00800000); steadyFairShareVCore_ = 0; onChanged(); return this;
}

private long steadyFairShareMemory_;
/** optional int64 steadyFairShareMemory = 25; @return Whether the steadyFairShareMemory field is set. */
@java.lang.Override
public boolean hasSteadyFairShareMemory() { return ((bitField0_ & 0x01000000) != 0); }
/** optional int64 steadyFairShareMemory = 25; @return The steadyFairShareMemory. */
@java.lang.Override
public long getSteadyFairShareMemory() { return steadyFairShareMemory_; }
/** optional int64 steadyFairShareMemory = 25; @param value The steadyFairShareMemory to set. @return This builder for chaining. */
public Builder setSteadyFairShareMemory(long value) {
  steadyFairShareMemory_ = value; bitField0_ |= 0x01000000; onChanged(); return this;
}
/** optional int64 steadyFairShareMemory = 25; @return This builder for chaining. */
public Builder clearSteadyFairShareMemory() {
  bitField0_ = (bitField0_ & ~0x01000000); steadyFairShareMemory_ = 0L; onChanged(); return this;
}

private java.lang.Object subClusterId_ = "";
/** optional string subClusterId = 26; @return Whether the subClusterId field is set. */
public boolean hasSubClusterId() { return ((bitField0_ & 0x02000000) != 0); }
/** optional string subClusterId = 26; @return The subClusterId. */
public java.lang.String getSubClusterId() {
  java.lang.Object ref = subClusterId_;
  if (!(ref instanceof java.lang.String)) {
    org.apache.hadoop.thirdparty.protobuf.ByteString bs =
        (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (bs.isValidUtf8()) {
      subClusterId_ = s;
    }
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/** optional string subClusterId = 26; @return The bytes for subClusterId. */
public org.apache.hadoop.thirdparty.protobuf.ByteString getSubClusterIdBytes() {
  java.lang.Object ref = subClusterId_;
  if (ref instanceof String) {
    org.apache.hadoop.thirdparty.protobuf.ByteString b =
        org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    subClusterId_ = b;
    return b;
  } else {
    return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
  }
}
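/*
 * Editor's note: string fields hold either a java.lang.String or a ByteString in
 * subClusterId_; each accessor converts lazily and caches the converted form back
 * (the isValidUtf8 check above caches only well-formed strings). Illustrative:
 *
 *   QueueInfoProto.Builder b = QueueInfoProto.newBuilder().setSubClusterId("sc-1");
 *   org.apache.hadoop.thirdparty.protobuf.ByteString raw = b.getSubClusterIdBytes();
 *   // first call converts via copyFromUtf8 and caches; later calls reuse the cache
 */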
/** optional string subClusterId = 26; @param value The subClusterId to set. @return This builder for chaining. */
public Builder setSubClusterId(java.lang.String value) {
  if (value == null) { throw new NullPointerException(); }
  subClusterId_ = value; bitField0_ |= 0x02000000; onChanged(); return this;
}
/** optional string subClusterId = 26; @return This builder for chaining. */
public Builder clearSubClusterId() {
  subClusterId_ = getDefaultInstance().getSubClusterId();
  bitField0_ = (bitField0_ & ~0x02000000); onChanged(); return this;
}
/** optional string subClusterId = 26; @param value The bytes for subClusterId to set. @return This builder for chaining. */
public Builder setSubClusterIdBytes(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
  if (value == null) { throw new NullPointerException(); }
  subClusterId_ = value; bitField0_ |= 0x02000000; onChanged(); return this;
}

private int maxRunningApp_;
/** optional int32 maxRunningApp = 27; @return Whether the maxRunningApp field is set. */
@java.lang.Override
public boolean hasMaxRunningApp() { return ((bitField0_ & 0x04000000) != 0); }
/** optional int32 maxRunningApp = 27; @return The maxRunningApp. */
@java.lang.Override
public int getMaxRunningApp() { return maxRunningApp_; }
/** optional int32 maxRunningApp = 27; @param value The maxRunningApp to set. @return This builder for chaining. */
public Builder setMaxRunningApp(int value) {
  maxRunningApp_ = value; bitField0_ |= 0x04000000; onChanged(); return this;
}
/** optional int32 maxRunningApp = 27; @return This builder for chaining. */
public Builder clearMaxRunningApp() {
  bitField0_ = (bitField0_ & ~0x04000000); maxRunningApp_ = 0; onChanged(); return this;
}

@java.lang.Override
public final Builder setUnknownFields(final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:hadoop.yarn.QueueInfoProto)
}

// @@protoc_insertion_point(class_scope:hadoop.yarn.QueueInfoProto)
private static final org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto();
}

public static org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getDefaultInstance() { return DEFAULT_INSTANCE; }
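/*
 * Editor's note: a hedged sketch of the usual entry points for this message; the
 * byte[] source is hypothetical.
 *
 *   byte[] wire = ...;                                   // previously serialized QueueInfoProto
 *   QueueInfoProto q = QueueInfoProto.parseFrom(wire);
 *   QueueInfoProto copy = QueueInfoProto.newBuilder(q)   // seed a builder from an instance
 *       .setMaxRunningApp(100)
 *       .build();
 */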
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<QueueInfoProto>
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<QueueInfoProto>() {
  @java.lang.Override
  public QueueInfoProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};

public static org.apache.hadoop.thirdparty.protobuf.Parser<QueueInfoProto> parser() { return PARSER; }

@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<QueueInfoProto> getParserForType() { return PARSER; }

@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; }

}
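/*
 * Editor's note: the *OrBuilder interface below is the read-only view implemented by
 * both the immutable message and its Builder, so code can accept either. A hedged
 * sketch using only method names declared in this file:
 *
 *   static float capacityOrZero(QueueConfigurationsProtoOrBuilder qc) {
 *     return qc.hasCapacity() ? qc.getCapacity() : 0F;
 *   }
 */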
public interface QueueConfigurationsProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.QueueConfigurationsProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /** optional float capacity = 1; @return Whether the capacity field is set. */
  boolean hasCapacity();
  /** optional float capacity = 1; @return The capacity. */
  float getCapacity();

  /** optional float absoluteCapacity = 2; @return Whether the absoluteCapacity field is set. */
  boolean hasAbsoluteCapacity();
  /** optional float absoluteCapacity = 2; @return The absoluteCapacity. */
  float getAbsoluteCapacity();

  /** optional float maxCapacity = 3; @return Whether the maxCapacity field is set. */
  boolean hasMaxCapacity();
  /** optional float maxCapacity = 3; @return The maxCapacity. */
  float getMaxCapacity();

  /** optional float absoluteMaxCapacity = 4; @return Whether the absoluteMaxCapacity field is set. */
  boolean hasAbsoluteMaxCapacity();
  /** optional float absoluteMaxCapacity = 4; @return The absoluteMaxCapacity. */
  float getAbsoluteMaxCapacity();

  /** optional float maxAMPercentage = 5; @return Whether the maxAMPercentage field is set. */
  boolean hasMaxAMPercentage();
  /** optional float maxAMPercentage = 5; @return The maxAMPercentage. */
  float getMaxAMPercentage();

  /** optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6; @return Whether the effectiveMinCapacity field is set. */
  boolean hasEffectiveMinCapacity();
  /** optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6; @return The effectiveMinCapacity. */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getEffectiveMinCapacity();
  /** optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6; */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getEffectiveMinCapacityOrBuilder();

  /** optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7; @return Whether the effectiveMaxCapacity field is set. */
  boolean hasEffectiveMaxCapacity();
  /** optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7; @return The effectiveMaxCapacity. */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getEffectiveMaxCapacity();
  /** optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7; */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getEffectiveMaxCapacityOrBuilder();

  /** optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8; @return Whether the configuredMinCapacity field is set. */
  boolean hasConfiguredMinCapacity();
  /** optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8; @return The configuredMinCapacity. */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getConfiguredMinCapacity();
  /** optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8; */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getConfiguredMinCapacityOrBuilder();

  /** optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9; @return Whether the configuredMaxCapacity field is set. */
  boolean hasConfiguredMaxCapacity();
  /** optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9; @return The configuredMaxCapacity. */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getConfiguredMaxCapacity();
  /** optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9; */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getConfiguredMaxCapacityOrBuilder();
}

/** Protobuf type {@code hadoop.yarn.QueueConfigurationsProto} */
public static final class QueueConfigurationsProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.QueueConfigurationsProto)
    QueueConfigurationsProtoOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use QueueConfigurationsProto.newBuilder() to construct.
  private QueueConfigurationsProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private QueueConfigurationsProto() { }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new QueueConfigurationsProto();
  }

  @java.lang.Override
  public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.class,
            org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.Builder.class);
  }

  private int bitField0_;

  public static final int CAPACITY_FIELD_NUMBER = 1;
  private float capacity_ = 0F;
  /** optional float capacity = 1; @return Whether the capacity field is set. */
  @java.lang.Override
  public boolean hasCapacity() { return ((bitField0_ & 0x00000001) != 0); }
  /** optional float capacity = 1; @return The capacity. */
  @java.lang.Override
  public float getCapacity() { return capacity_; }

  public static final int ABSOLUTECAPACITY_FIELD_NUMBER = 2;
  private float absoluteCapacity_ = 0F;
  /** optional float absoluteCapacity = 2; @return Whether the absoluteCapacity field is set. */
  @java.lang.Override
  public boolean hasAbsoluteCapacity() { return ((bitField0_ & 0x00000002) != 0); }
  /** optional float absoluteCapacity = 2; @return The absoluteCapacity. */
  @java.lang.Override
  public float getAbsoluteCapacity() { return absoluteCapacity_; }

  public static final int MAXCAPACITY_FIELD_NUMBER = 3;
  private float maxCapacity_ = 0F;
  /** optional float maxCapacity = 3; @return Whether the maxCapacity field is set. */
  @java.lang.Override
  public boolean hasMaxCapacity() { return ((bitField0_ & 0x00000004) != 0); }
  /** optional float maxCapacity = 3; @return The maxCapacity. */
  @java.lang.Override
  public float getMaxCapacity() { return maxCapacity_; }

  public static final int ABSOLUTEMAXCAPACITY_FIELD_NUMBER = 4;
  private float absoluteMaxCapacity_ = 0F;
  /** optional float absoluteMaxCapacity = 4; @return Whether the absoluteMaxCapacity field is set. */
  @java.lang.Override
  public boolean hasAbsoluteMaxCapacity() { return ((bitField0_ & 0x00000008) != 0); }
  /** optional float absoluteMaxCapacity = 4; @return The absoluteMaxCapacity. */
  @java.lang.Override
  public float getAbsoluteMaxCapacity() { return absoluteMaxCapacity_; }

  public static final int MAXAMPERCENTAGE_FIELD_NUMBER = 5;
  private float maxAMPercentage_ = 0F;
  /** optional float maxAMPercentage = 5; @return Whether the maxAMPercentage field is set. */
  @java.lang.Override
  public boolean hasMaxAMPercentage() { return ((bitField0_ & 0x00000010) != 0); }
  /** optional float maxAMPercentage = 5; @return The maxAMPercentage. */
  @java.lang.Override
  public float getMaxAMPercentage() { return maxAMPercentage_; }

  public static final int EFFECTIVEMINCAPACITY_FIELD_NUMBER = 6;
  private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto effectiveMinCapacity_;
  /** optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6; @return Whether the effectiveMinCapacity field is set. */
  @java.lang.Override
  public boolean hasEffectiveMinCapacity() { return ((bitField0_ & 0x00000020) != 0); }
  /** optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6; @return The effectiveMinCapacity. */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getEffectiveMinCapacity() {
    return effectiveMinCapacity_ == null
        ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : effectiveMinCapacity_;
  }
  /** optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6; */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getEffectiveMinCapacityOrBuilder() {
    return effectiveMinCapacity_ == null
        ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : effectiveMinCapacity_;
  }

  public static final int EFFECTIVEMAXCAPACITY_FIELD_NUMBER = 7;
  private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto effectiveMaxCapacity_;
  /** optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7; @return Whether the effectiveMaxCapacity field is set. */
  @java.lang.Override
  public boolean hasEffectiveMaxCapacity() { return ((bitField0_ & 0x00000040) != 0); }
  /** optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7; @return The effectiveMaxCapacity. */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getEffectiveMaxCapacity() {
    return effectiveMaxCapacity_ == null
        ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : effectiveMaxCapacity_;
  }
  /** optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7; */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getEffectiveMaxCapacityOrBuilder() {
    return effectiveMaxCapacity_ == null
        ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : effectiveMaxCapacity_;
  }
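/*
 * Editor's note: message-typed getters never return null; when the field is unset
 * they return ResourceProto.getDefaultInstance(), so hasX() is the only reliable
 * presence test:
 *
 *   QueueConfigurationsProto qc = QueueConfigurationsProto.getDefaultInstance();
 *   qc.getEffectiveMinCapacity();        // default ResourceProto instance, not null
 *   qc.hasEffectiveMinCapacity();        // false
 */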
  public static final int CONFIGUREDMINCAPACITY_FIELD_NUMBER = 8;
  private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto configuredMinCapacity_;
  /** optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8; @return Whether the configuredMinCapacity field is set. */
  @java.lang.Override
  public boolean hasConfiguredMinCapacity() { return ((bitField0_ & 0x00000080) != 0); }
  /** optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8; @return The configuredMinCapacity. */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getConfiguredMinCapacity() {
    return configuredMinCapacity_ == null
        ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : configuredMinCapacity_;
  }
  /** optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8; */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getConfiguredMinCapacityOrBuilder() {
    return configuredMinCapacity_ == null
        ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : configuredMinCapacity_;
  }

  public static final int CONFIGUREDMAXCAPACITY_FIELD_NUMBER = 9;
  private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto configuredMaxCapacity_;
  /** optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9; @return Whether the configuredMaxCapacity field is set. */
  @java.lang.Override
  public boolean hasConfiguredMaxCapacity() { return ((bitField0_ & 0x00000100) != 0); }
  /** optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9; @return The configuredMaxCapacity. */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getConfiguredMaxCapacity() {
    return configuredMaxCapacity_ == null
        ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : configuredMaxCapacity_;
  }
  /** optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9; */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getConfiguredMaxCapacityOrBuilder() {
    return configuredMaxCapacity_ == null
        ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : configuredMaxCapacity_;
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    if (hasEffectiveMinCapacity()) {
      if (!getEffectiveMinCapacity().isInitialized()) { memoizedIsInitialized = 0; return false; }
    }
    if (hasEffectiveMaxCapacity()) {
      if (!getEffectiveMaxCapacity().isInitialized()) { memoizedIsInitialized = 0; return false; }
    }
    if (hasConfiguredMinCapacity()) {
      if (!getConfiguredMinCapacity().isInitialized()) { memoizedIsInitialized = 0; return false; }
    }
    if (hasConfiguredMaxCapacity()) {
      if (!getConfiguredMaxCapacity().isInitialized()) { memoizedIsInitialized = 0; return false; }
    }
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) { output.writeFloat(1, capacity_); }
    if (((bitField0_ & 0x00000002) != 0)) { output.writeFloat(2, absoluteCapacity_); }
    if (((bitField0_ & 0x00000004) != 0)) { output.writeFloat(3, maxCapacity_); }
    if (((bitField0_ & 0x00000008) != 0)) { output.writeFloat(4, absoluteMaxCapacity_); }
    if (((bitField0_ & 0x00000010) != 0)) { output.writeFloat(5, maxAMPercentage_); }
    if (((bitField0_ & 0x00000020) != 0)) { output.writeMessage(6, getEffectiveMinCapacity()); }
    if (((bitField0_ & 0x00000040) != 0)) { output.writeMessage(7, getEffectiveMaxCapacity()); }
    if (((bitField0_ & 0x00000080) != 0)) { output.writeMessage(8, getConfiguredMinCapacity()); }
    if (((bitField0_ & 0x00000100) != 0)) { output.writeMessage(9, getConfiguredMaxCapacity()); }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeFloatSize(1, capacity_); }
    if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeFloatSize(2, absoluteCapacity_); }
    if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeFloatSize(3, maxCapacity_); }
    if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeFloatSize(4, absoluteMaxCapacity_); }
    if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeFloatSize(5, maxAMPercentage_); }
    if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeMessageSize(6, getEffectiveMinCapacity()); }
    if (((bitField0_ & 0x00000040) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeMessageSize(7, getEffectiveMaxCapacity()); }
    if (((bitField0_ & 0x00000080) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeMessageSize(8, getConfiguredMinCapacity()); }
    if (((bitField0_ & 0x00000100) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeMessageSize(9, getConfiguredMaxCapacity()); }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
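/*
 * Editor's note: writeTo() and getSerializedSize() emit only fields whose presence
 * bits are set. A hedged round-trip sketch using the standard protobuf surface:
 *
 *   QueueConfigurationsProto qc =
 *       QueueConfigurationsProto.newBuilder().setCapacity(0.5F).build();
 *   byte[] wire = qc.toByteArray();      // drives getSerializedSize() + writeTo()
 *   QueueConfigurationsProto back = QueueConfigurationsProto.parseFrom(wire);
 */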
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) { return true; }
    if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto)) {
      return super.equals(obj);
    }
    org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto other =
        (org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto) obj;
    if (hasCapacity() != other.hasCapacity()) return false;
    if (hasCapacity()) {
      if (java.lang.Float.floatToIntBits(getCapacity())
          != java.lang.Float.floatToIntBits(other.getCapacity())) return false;
    }
    if (hasAbsoluteCapacity() != other.hasAbsoluteCapacity()) return false;
    if (hasAbsoluteCapacity()) {
      if (java.lang.Float.floatToIntBits(getAbsoluteCapacity())
          != java.lang.Float.floatToIntBits(other.getAbsoluteCapacity())) return false;
    }
    if (hasMaxCapacity() != other.hasMaxCapacity()) return false;
    if (hasMaxCapacity()) {
      if (java.lang.Float.floatToIntBits(getMaxCapacity())
          != java.lang.Float.floatToIntBits(other.getMaxCapacity())) return false;
    }
    if (hasAbsoluteMaxCapacity() != other.hasAbsoluteMaxCapacity()) return false;
    if (hasAbsoluteMaxCapacity()) {
      if (java.lang.Float.floatToIntBits(getAbsoluteMaxCapacity())
          != java.lang.Float.floatToIntBits(other.getAbsoluteMaxCapacity())) return false;
    }
    if (hasMaxAMPercentage() != other.hasMaxAMPercentage()) return false;
    if (hasMaxAMPercentage()) {
      if (java.lang.Float.floatToIntBits(getMaxAMPercentage())
          != java.lang.Float.floatToIntBits(other.getMaxAMPercentage())) return false;
    }
    if (hasEffectiveMinCapacity() != other.hasEffectiveMinCapacity()) return false;
    if (hasEffectiveMinCapacity()) {
      if (!getEffectiveMinCapacity().equals(other.getEffectiveMinCapacity())) return false;
    }
    if (hasEffectiveMaxCapacity() != other.hasEffectiveMaxCapacity()) return false;
    if (hasEffectiveMaxCapacity()) {
      if (!getEffectiveMaxCapacity().equals(other.getEffectiveMaxCapacity())) return false;
    }
    if (hasConfiguredMinCapacity() != other.hasConfiguredMinCapacity()) return false;
    if (hasConfiguredMinCapacity()) {
      if (!getConfiguredMinCapacity().equals(other.getConfiguredMinCapacity())) return false;
    }
    if (hasConfiguredMaxCapacity() != other.hasConfiguredMaxCapacity()) return false;
    if (hasConfiguredMaxCapacity()) {
      if (!getConfiguredMaxCapacity().equals(other.getConfiguredMaxCapacity())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
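/*
 * Editor's note: float fields are compared through Float.floatToIntBits, making
 * equals() bit-exact rather than ==-based: every NaN collapses to one canonical bit
 * pattern (so NaN fields compare equal), while 0F and -0F have distinct patterns
 * (so they compare unequal).
 */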
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) { return memoizedHashCode; }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasCapacity()) { hash = (37 * hash) + CAPACITY_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits(getCapacity()); }
    if (hasAbsoluteCapacity()) { hash = (37 * hash) + ABSOLUTECAPACITY_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits(getAbsoluteCapacity()); }
    if (hasMaxCapacity()) { hash = (37 * hash) + MAXCAPACITY_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits(getMaxCapacity()); }
    if (hasAbsoluteMaxCapacity()) { hash = (37 * hash) + ABSOLUTEMAXCAPACITY_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits(getAbsoluteMaxCapacity()); }
    if (hasMaxAMPercentage()) { hash = (37 * hash) + MAXAMPERCENTAGE_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits(getMaxAMPercentage()); }
    if (hasEffectiveMinCapacity()) { hash = (37 * hash) + EFFECTIVEMINCAPACITY_FIELD_NUMBER; hash = (53 * hash) + getEffectiveMinCapacity().hashCode(); }
    if (hasEffectiveMaxCapacity()) { hash = (37 * hash) + EFFECTIVEMAXCAPACITY_FIELD_NUMBER; hash = (53 * hash) + getEffectiveMaxCapacity().hashCode(); }
    if (hasConfiguredMinCapacity()) { hash = (37 * hash) + CONFIGUREDMINCAPACITY_FIELD_NUMBER; hash = (53 * hash) + getConfiguredMinCapacity().hashCode(); }
    if (hasConfiguredMaxCapacity()) { hash = (37 * hash) + CONFIGUREDMAXCAPACITY_FIELD_NUMBER; hash = (53 * hash) + getConfiguredMaxCapacity().hashCode(); }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(java.nio.ByteBuffer data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(
      java.nio.ByteBuffer data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(byte[] data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(
      byte[] data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
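/*
 * Editor's note: parseDelimitedFrom() pairs with writeDelimitedTo() when one stream
 * carries several length-prefixed messages; plain parseFrom() consumes the whole
 * input as a single message. Hedged sketch, the stream itself is hypothetical:
 *
 *   java.io.InputStream in = ...;
 *   QueueConfigurationsProto first = QueueConfigurationsProto.parseDelimitedFrom(in);
 *   QueueConfigurationsProto second = QueueConfigurationsProto.parseDelimitedFrom(in);
 */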
  public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
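/*
 * Editor's note: messages are immutable, so "modifying" one is copy-then-rebuild via
 * toBuilder(). Illustrative, assuming qc is an existing QueueConfigurationsProto:
 *
 *   QueueConfigurationsProto updated = qc.toBuilder().setMaxCapacity(1.0F).build();
 *   // qc itself is unchanged
 */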
  /** Protobuf type {@code hadoop.yarn.QueueConfigurationsProto} */
  public static final class Builder extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:hadoop.yarn.QueueConfigurationsProto)
      org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProtoOrBuilder {
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.class,
              org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.Builder.class);
    }

    // Construct using org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.newBuilder()
    private Builder() { maybeForceBuilderInitialization(); }

    private Builder(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getEffectiveMinCapacityFieldBuilder();
        getEffectiveMaxCapacityFieldBuilder();
        getConfiguredMinCapacityFieldBuilder();
        getConfiguredMaxCapacityFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      capacity_ = 0F;
      absoluteCapacity_ = 0F;
      maxCapacity_ = 0F;
      absoluteMaxCapacity_ = 0F;
      maxAMPercentage_ = 0F;
      effectiveMinCapacity_ = null;
      if (effectiveMinCapacityBuilder_ != null) { effectiveMinCapacityBuilder_.dispose(); effectiveMinCapacityBuilder_ = null; }
      effectiveMaxCapacity_ = null;
      if (effectiveMaxCapacityBuilder_ != null) { effectiveMaxCapacityBuilder_.dispose(); effectiveMaxCapacityBuilder_ = null; }
      configuredMinCapacity_ = null;
      if (configuredMinCapacityBuilder_ != null) { configuredMinCapacityBuilder_.dispose(); configuredMinCapacityBuilder_ = null; }
      configuredMaxCapacity_ = null;
      if (configuredMaxCapacityBuilder_ != null) { configuredMaxCapacityBuilder_.dispose(); configuredMaxCapacityBuilder_ = null; }
      return this;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsProto_descriptor;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto getDefaultInstanceForType() {
      return org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.getDefaultInstance();
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto build() {
      org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto result = buildPartial();
      if (!result.isInitialized()) { throw newUninitializedMessageException(result); }
      return result;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto buildPartial() {
      org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto result =
          new org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }

    private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) { result.capacity_ = capacity_; to_bitField0_ |= 0x00000001; }
      if (((from_bitField0_ & 0x00000002) != 0)) { result.absoluteCapacity_ = absoluteCapacity_; to_bitField0_ |= 0x00000002; }
      if (((from_bitField0_ & 0x00000004) != 0)) { result.maxCapacity_ = maxCapacity_; to_bitField0_ |= 0x00000004; }
      if (((from_bitField0_ & 0x00000008) != 0)) { result.absoluteMaxCapacity_ = absoluteMaxCapacity_; to_bitField0_ |= 0x00000008; }
      if (((from_bitField0_ & 0x00000010) != 0)) { result.maxAMPercentage_ = maxAMPercentage_; to_bitField0_ |= 0x00000010; }
      if (((from_bitField0_ & 0x00000020) != 0)) {
        result.effectiveMinCapacity_ = effectiveMinCapacityBuilder_ == null ? effectiveMinCapacity_ : effectiveMinCapacityBuilder_.build();
        to_bitField0_ |= 0x00000020;
      }
      if (((from_bitField0_ & 0x00000040) != 0)) {
        result.effectiveMaxCapacity_ = effectiveMaxCapacityBuilder_ == null ? effectiveMaxCapacity_ : effectiveMaxCapacityBuilder_.build();
        to_bitField0_ |= 0x00000040;
      }
      if (((from_bitField0_ & 0x00000080) != 0)) {
        result.configuredMinCapacity_ = configuredMinCapacityBuilder_ == null ? configuredMinCapacity_ : configuredMinCapacityBuilder_.build();
        to_bitField0_ |= 0x00000080;
      }
      if (((from_bitField0_ & 0x00000100) != 0)) {
        result.configuredMaxCapacity_ = configuredMaxCapacityBuilder_ == null ? configuredMaxCapacity_ : configuredMaxCapacityBuilder_.build();
        to_bitField0_ |= 0x00000100;
      }
      result.bitField0_ |= to_bitField0_;
    }
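/*
 * Editor's note: build() runs the isInitialized() check (required fields inside any
 * nested ResourceProto) and throws on failure; buildPartial(), used above and by the
 * parser, skips that check and may return an incomplete message:
 *
 *   QueueConfigurationsProto maybeIncomplete = builder.buildPartial(); // never throws here
 */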
    @java.lang.Override
    public Builder clone() { return super.clone(); }
    @java.lang.Override
    public Builder setField(org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto) {
        return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto other) {
      if (other == org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.getDefaultInstance()) return this;
      if (other.hasCapacity()) { setCapacity(other.getCapacity()); }
      if (other.hasAbsoluteCapacity()) { setAbsoluteCapacity(other.getAbsoluteCapacity()); }
      if (other.hasMaxCapacity()) { setMaxCapacity(other.getMaxCapacity()); }
      if (other.hasAbsoluteMaxCapacity()) { setAbsoluteMaxCapacity(other.getAbsoluteMaxCapacity()); }
      if (other.hasMaxAMPercentage()) { setMaxAMPercentage(other.getMaxAMPercentage()); }
      if (other.hasEffectiveMinCapacity()) { mergeEffectiveMinCapacity(other.getEffectiveMinCapacity()); }
      if (other.hasEffectiveMaxCapacity()) { mergeEffectiveMaxCapacity(other.getEffectiveMaxCapacity()); }
      if (other.hasConfiguredMinCapacity()) { mergeConfiguredMinCapacity(other.getConfiguredMinCapacity()); }
      if (other.hasConfiguredMaxCapacity()) { mergeConfiguredMaxCapacity(other.getConfiguredMaxCapacity()); }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      if (hasEffectiveMinCapacity()) { if (!getEffectiveMinCapacity().isInitialized()) { return false; } }
      if (hasEffectiveMaxCapacity()) { if (!getEffectiveMaxCapacity().isInitialized()) { return false; } }
      if (hasConfiguredMinCapacity()) { if (!getConfiguredMinCapacity().isInitialized()) { return false; } }
      if (hasConfiguredMaxCapacity()) { if (!getConfiguredMaxCapacity().isInitialized()) { return false; } }
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) { throw new java.lang.NullPointerException(); }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0: done = true; break;
            case 13: { capacity_ = input.readFloat(); bitField0_ |= 0x00000001; break; } // case 13
            case 21: { absoluteCapacity_ = input.readFloat(); bitField0_ |= 0x00000002; break; } // case 21
            case 29: { maxCapacity_ = input.readFloat(); bitField0_ |= 0x00000004; break; } // case 29
            case 37: { absoluteMaxCapacity_ = input.readFloat(); bitField0_ |= 0x00000008; break; } // case 37
            case 45: { maxAMPercentage_ = input.readFloat(); bitField0_ |= 0x00000010; break; } // case 45
            case 50: { input.readMessage(getEffectiveMinCapacityFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000020; break; } // case 50
            case 58: { input.readMessage(getEffectiveMaxCapacityFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000040; break; } // case 58
            case 66: { input.readMessage(getConfiguredMinCapacityFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000080; break; } // case 66
            case 74: { input.readMessage(getConfiguredMaxCapacityFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000100; break; } // case 74
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
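/*
 * Editor's note: the case labels in mergeFrom() above are protobuf wire tags,
 * tag = (field_number << 3) | wire_type. Floats use wire type 5 (32-bit), so
 * capacity=1 -> 13, absoluteCapacity=2 -> 21, ..., maxAMPercentage=5 -> 45; nested
 * messages use wire type 2, so effectiveMinCapacity=6 -> 50 up through
 * configuredMaxCapacity=9 -> 74.
 */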
    private int bitField0_;

    private float capacity_;
    /** optional float capacity = 1; @return Whether the capacity field is set. */
    @java.lang.Override
    public boolean hasCapacity() { return ((bitField0_ & 0x00000001) != 0); }
    /** optional float capacity = 1; @return The capacity. */
    @java.lang.Override
    public float getCapacity() { return capacity_; }
    /** optional float capacity = 1; @param value The capacity to set. @return This builder for chaining. */
    public Builder setCapacity(float value) {
      capacity_ = value; bitField0_ |= 0x00000001; onChanged(); return this;
    }
    /** optional float capacity = 1; @return This builder for chaining. */
    public Builder clearCapacity() {
      bitField0_ = (bitField0_ & ~0x00000001); capacity_ = 0F; onChanged(); return this;
    }

    private float absoluteCapacity_;
    /** optional float absoluteCapacity = 2; @return Whether the absoluteCapacity field is set. */
    @java.lang.Override
    public boolean hasAbsoluteCapacity() { return ((bitField0_ & 0x00000002) != 0); }
    /** optional float absoluteCapacity = 2; @return The absoluteCapacity. */
    @java.lang.Override
    public float getAbsoluteCapacity() { return absoluteCapacity_; }
    /** optional float absoluteCapacity = 2; @param value The absoluteCapacity to set. @return This builder for chaining. */
    public Builder setAbsoluteCapacity(float value) {
      absoluteCapacity_ = value; bitField0_ |= 0x00000002; onChanged(); return this;
    }
    /** optional float absoluteCapacity = 2; @return This builder for chaining. */
    public Builder clearAbsoluteCapacity() {
      bitField0_ = (bitField0_ & ~0x00000002); absoluteCapacity_ = 0F; onChanged(); return this;
    }

    private float maxCapacity_;
    /** optional float maxCapacity = 3; @return Whether the maxCapacity field is set. */
    @java.lang.Override
    public boolean hasMaxCapacity() { return ((bitField0_ & 0x00000004) != 0); }
    /** optional float maxCapacity = 3; @return The maxCapacity. */
    @java.lang.Override
    public float getMaxCapacity() { return maxCapacity_; }
    /** optional float maxCapacity = 3; @param value The maxCapacity to set. @return This builder for chaining. */
    public Builder setMaxCapacity(float value) {
      maxCapacity_ = value; bitField0_ |= 0x00000004; onChanged(); return this;
    }
    /** optional float maxCapacity = 3; @return This builder for chaining. */
    public Builder clearMaxCapacity() {
      bitField0_ = (bitField0_ & ~0x00000004); maxCapacity_ = 0F; onChanged(); return this;
    }
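/*
 * Editor's note: every setter returns this, so builder calls chain; the values here
 * are illustrative only:
 *
 *   QueueConfigurationsProto qc = QueueConfigurationsProto.newBuilder()
 *       .setCapacity(0.3F)
 *       .setMaxCapacity(0.6F)
 *       .setMaxAMPercentage(0.1F)
 *       .build();
 */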
    private float absoluteMaxCapacity_;
    /** optional float absoluteMaxCapacity = 4; @return Whether the absoluteMaxCapacity field is set. */
    @java.lang.Override
    public boolean hasAbsoluteMaxCapacity() { return ((bitField0_ & 0x00000008) != 0); }
    /** optional float absoluteMaxCapacity = 4; @return The absoluteMaxCapacity. */
    @java.lang.Override
    public float getAbsoluteMaxCapacity() { return absoluteMaxCapacity_; }
    /** optional float absoluteMaxCapacity = 4; @param value The absoluteMaxCapacity to set. @return This builder for chaining. */
    public Builder setAbsoluteMaxCapacity(float value) {
      absoluteMaxCapacity_ = value; bitField0_ |= 0x00000008; onChanged(); return this;
    }
    /** optional float absoluteMaxCapacity = 4; @return This builder for chaining. */
    public Builder clearAbsoluteMaxCapacity() {
      bitField0_ = (bitField0_ & ~0x00000008); absoluteMaxCapacity_ = 0F; onChanged(); return this;
    }

    private float maxAMPercentage_;
    /** optional float maxAMPercentage = 5; @return Whether the maxAMPercentage field is set. */
    @java.lang.Override
    public boolean hasMaxAMPercentage() { return ((bitField0_ & 0x00000010) != 0); }
    /** optional float maxAMPercentage = 5; @return The maxAMPercentage. */
    @java.lang.Override
    public float getMaxAMPercentage() { return maxAMPercentage_; }
    /** optional float maxAMPercentage = 5; @param value The maxAMPercentage to set. @return This builder for chaining. */
    public Builder setMaxAMPercentage(float value) {
      maxAMPercentage_ = value; bitField0_ |= 0x00000010; onChanged(); return this;
    }
    /** optional float maxAMPercentage = 5; @return This builder for chaining. */
    public Builder clearMaxAMPercentage() {
      bitField0_ = (bitField0_ & ~0x00000010); maxAMPercentage_ = 0F; onChanged(); return this;
    }
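/*
 * Editor's note: each nested ResourceProto field below keeps a plain field plus a
 * lazily created SingleFieldBuilderV3; getXBuilder() switches to the builder path and
 * marks the field present. A hedged sketch, assuming ResourceProto's generated
 * setMemory/setVirtualCores accessors:
 *
 *   QueueConfigurationsProto.Builder b = QueueConfigurationsProto.newBuilder();
 *   b.getEffectiveMinCapacityBuilder().setMemory(4096L).setVirtualCores(4);
 *   QueueConfigurationsProto qc = b.build();   // hasEffectiveMinCapacity() == true
 */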
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto effectiveMinCapacity_;
    private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> effectiveMinCapacityBuilder_;
    /** optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6; @return Whether the effectiveMinCapacity field is set. */
    public boolean hasEffectiveMinCapacity() { return ((bitField0_ & 0x00000020) != 0); }
    /** optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6; @return The effectiveMinCapacity. */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getEffectiveMinCapacity() {
      if (effectiveMinCapacityBuilder_ == null) {
        return effectiveMinCapacity_ == null
            ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : effectiveMinCapacity_;
      } else {
        return effectiveMinCapacityBuilder_.getMessage();
      }
    }
    /** optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6; */
    public Builder setEffectiveMinCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
      if (effectiveMinCapacityBuilder_ == null) {
        if (value == null) { throw new NullPointerException(); }
        effectiveMinCapacity_ = value;
      } else {
        effectiveMinCapacityBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000020;
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6; */
    public Builder setEffectiveMinCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
      if (effectiveMinCapacityBuilder_ == null) {
        effectiveMinCapacity_ = builderForValue.build();
      } else {
        effectiveMinCapacityBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000020;
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6; */
    public Builder mergeEffectiveMinCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
      if (effectiveMinCapacityBuilder_ == null) {
        if (((bitField0_ & 0x00000020) != 0) &&
            effectiveMinCapacity_ != null &&
            effectiveMinCapacity_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
          getEffectiveMinCapacityBuilder().mergeFrom(value);
        } else {
          effectiveMinCapacity_ = value;
        }
      } else {
        effectiveMinCapacityBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000020;
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6; */
    public Builder clearEffectiveMinCapacity() {
      bitField0_ = (bitField0_ & ~0x00000020);
      effectiveMinCapacity_ = null;
      if (effectiveMinCapacityBuilder_ != null) { effectiveMinCapacityBuilder_.dispose(); effectiveMinCapacityBuilder_ = null; }
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6; */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getEffectiveMinCapacityBuilder() {
      bitField0_ |= 0x00000020;
      onChanged();
      return getEffectiveMinCapacityFieldBuilder().getBuilder();
    }
    /** optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6; */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getEffectiveMinCapacityOrBuilder() {
      if (effectiveMinCapacityBuilder_ != null) {
        return effectiveMinCapacityBuilder_.getMessageOrBuilder();
      } else {
        return effectiveMinCapacity_ == null
            ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : effectiveMinCapacity_;
      }
    }
    /** optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6; */
    private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getEffectiveMinCapacityFieldBuilder() {
      if (effectiveMinCapacityBuilder_ == null) {
        effectiveMinCapacityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto,
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder,
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                getEffectiveMinCapacity(), getParentForChildren(), isClean());
        effectiveMinCapacity_ = null;
      }
      return effectiveMinCapacityBuilder_;
    }

    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto effectiveMaxCapacity_;
    private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> effectiveMaxCapacityBuilder_;
    /** optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7; @return Whether the effectiveMaxCapacity field is set. */
    public boolean hasEffectiveMaxCapacity() { return ((bitField0_ & 0x00000040) != 0); }
    /** optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7; @return The effectiveMaxCapacity. */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getEffectiveMaxCapacity() {
      if (effectiveMaxCapacityBuilder_ == null) {
        return effectiveMaxCapacity_ == null
            ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : effectiveMaxCapacity_;
      } else {
        return effectiveMaxCapacityBuilder_.getMessage();
      }
    }
    /** optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7; */
    public Builder setEffectiveMaxCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
      if (effectiveMaxCapacityBuilder_ == null) {
        if (value == null) { throw new NullPointerException(); }
        effectiveMaxCapacity_ = value;
      } else {
        effectiveMaxCapacityBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000040;
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7; */
    public Builder setEffectiveMaxCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
      if (effectiveMaxCapacityBuilder_ == null) {
        effectiveMaxCapacity_ = builderForValue.build();
      } else {
        effectiveMaxCapacityBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000040;
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7; */
    public Builder mergeEffectiveMaxCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
      if (effectiveMaxCapacityBuilder_ == null) {
        if (((bitField0_ & 0x00000040) != 0) &&
            effectiveMaxCapacity_ != null &&
            effectiveMaxCapacity_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
          getEffectiveMaxCapacityBuilder().mergeFrom(value);
        } else {
          effectiveMaxCapacity_ = value;
        }
      } else {
        effectiveMaxCapacityBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000040;
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7; */
    public Builder clearEffectiveMaxCapacity() {
      bitField0_ = (bitField0_ & ~0x00000040);
      effectiveMaxCapacity_ = null;
      if (effectiveMaxCapacityBuilder_ != null) { effectiveMaxCapacityBuilder_.dispose(); effectiveMaxCapacityBuilder_ = null; }
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7; */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getEffectiveMaxCapacityBuilder() {
      bitField0_ |= 0x00000040;
      onChanged();
      return getEffectiveMaxCapacityFieldBuilder().getBuilder();
    }
    /** optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7; */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getEffectiveMaxCapacityOrBuilder() {
      if (effectiveMaxCapacityBuilder_ != null) {
        return effectiveMaxCapacityBuilder_.getMessageOrBuilder();
      } else {
        return effectiveMaxCapacity_ == null
            ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : effectiveMaxCapacity_;
      }
    }
    /** optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7; */
    private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getEffectiveMaxCapacityFieldBuilder() {
      if (effectiveMaxCapacityBuilder_ == null) {
        effectiveMaxCapacityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto,
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder,
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                getEffectiveMaxCapacity(), getParentForChildren(), isClean());
        effectiveMaxCapacity_ = null;
      }
      return effectiveMaxCapacityBuilder_;
    }

    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto configuredMinCapacity_;
    private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> configuredMinCapacityBuilder_;
    /** optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8; @return Whether the configuredMinCapacity field is set. */
    public boolean hasConfiguredMinCapacity() { return ((bitField0_ & 0x00000080) != 0); }
    /** optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8; @return The configuredMinCapacity. */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getConfiguredMinCapacity() {
      if (configuredMinCapacityBuilder_ == null) {
        return configuredMinCapacity_ == null
            ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : configuredMinCapacity_;
      } else {
        return configuredMinCapacityBuilder_.getMessage();
      }
    }
    /** optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8; */
    public Builder setConfiguredMinCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
      if (configuredMinCapacityBuilder_ == null) {
        if (value == null) { throw new NullPointerException(); }
        configuredMinCapacity_ = value;
      } else {
        configuredMinCapacityBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000080;
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8; */
    public Builder setConfiguredMinCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
      if (configuredMinCapacityBuilder_ == null) {
        configuredMinCapacity_ = builderForValue.build();
      } else {
        configuredMinCapacityBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000080;
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8; */
    public Builder mergeConfiguredMinCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
      if (configuredMinCapacityBuilder_ == null) {
        if (((bitField0_ & 0x00000080) != 0) &&
            configuredMinCapacity_ != null &&
            configuredMinCapacity_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
          getConfiguredMinCapacityBuilder().mergeFrom(value);
        } else {
          configuredMinCapacity_ = value;
        }
      } else {
        configuredMinCapacityBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000080;
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8; */
    public Builder clearConfiguredMinCapacity() {
      bitField0_ = (bitField0_ & ~0x00000080);
      configuredMinCapacity_ = null;
      if (configuredMinCapacityBuilder_ != null) { configuredMinCapacityBuilder_.dispose(); configuredMinCapacityBuilder_ = null; }
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8; */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getConfiguredMinCapacityBuilder() {
      bitField0_ |= 0x00000080;
      onChanged();
      return getConfiguredMinCapacityFieldBuilder().getBuilder();
    }
    /** optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8; */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getConfiguredMinCapacityOrBuilder() {
      if (configuredMinCapacityBuilder_ != null) {
        return configuredMinCapacityBuilder_.getMessageOrBuilder();
      } else {
        return configuredMinCapacity_ == null
            ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : configuredMinCapacity_;
      }
    }
    /** optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8; */
    private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getConfiguredMinCapacityFieldBuilder() {
      if (configuredMinCapacityBuilder_ == null) {
        configuredMinCapacityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto,
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder,
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                getConfiguredMinCapacity(), getParentForChildren(), isClean());
        configuredMinCapacity_ = null;
      }
      return configuredMinCapacityBuilder_;
    }

    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto configuredMaxCapacity_;
    private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> configuredMaxCapacityBuilder_;
    /** optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9; @return Whether the configuredMaxCapacity field is set. */
    public boolean hasConfiguredMaxCapacity() { return ((bitField0_ & 0x00000100) != 0); }
    /** optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9; @return The configuredMaxCapacity. */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getConfiguredMaxCapacity() {
      if (configuredMaxCapacityBuilder_ == null) {
        return configuredMaxCapacity_ == null
            ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : configuredMaxCapacity_;
      } else {
        return configuredMaxCapacityBuilder_.getMessage();
      }
    }
    /** optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9; */
    public Builder setConfiguredMaxCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
      if (configuredMaxCapacityBuilder_ == null) {
        if (value == null) { throw new NullPointerException(); }
        configuredMaxCapacity_ = value;
      } else {
        configuredMaxCapacityBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000100;
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9; */
    public Builder setConfiguredMaxCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
      if (configuredMaxCapacityBuilder_ == null) {
        configuredMaxCapacity_ = builderForValue.build();
      } else {
        configuredMaxCapacityBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000100;
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9; */
    public Builder mergeConfiguredMaxCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
      if (configuredMaxCapacityBuilder_ == null) {
        if (((bitField0_ & 0x00000100) != 0) &&
            configuredMaxCapacity_ != null &&
            configuredMaxCapacity_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
          getConfiguredMaxCapacityBuilder().mergeFrom(value);
        } else {
          configuredMaxCapacity_ = value;
        }
      } else {
        configuredMaxCapacityBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000100;
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9; */
    public Builder clearConfiguredMaxCapacity() {
      bitField0_ = (bitField0_ & ~0x00000100);
      configuredMaxCapacity_ = null;
      if (configuredMaxCapacityBuilder_ != null) { configuredMaxCapacityBuilder_.dispose(); configuredMaxCapacityBuilder_ = null; }
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9; */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getConfiguredMaxCapacityBuilder() {
      bitField0_ |= 0x00000100;
      onChanged();
      return getConfiguredMaxCapacityFieldBuilder().getBuilder();
    }
    /** optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9; */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getConfiguredMaxCapacityOrBuilder() {
      if (configuredMaxCapacityBuilder_ != null) {
        return configuredMaxCapacityBuilder_.getMessageOrBuilder();
      } else {
        return configuredMaxCapacity_ == null ?
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : configuredMaxCapacity_; } } /** * optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getConfiguredMaxCapacityFieldBuilder() { if (configuredMaxCapacityBuilder_ == null) { configuredMaxCapacityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>( getConfiguredMaxCapacity(), getParentForChildren(), isClean()); configuredMaxCapacity_ = null; } return configuredMaxCapacityBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.QueueConfigurationsProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.QueueConfigurationsProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public QueueConfigurationsProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface QueueConfigurationsMapProtoOrBuilder extends // 
@@protoc_insertion_point(interface_extends:hadoop.yarn.QueueConfigurationsMapProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * required string partitionName = 1; * @return Whether the partitionName field is set. */ boolean hasPartitionName(); /** * required string partitionName = 1; * @return The partitionName. */ java.lang.String getPartitionName(); /** * required string partitionName = 1; * @return The bytes for partitionName. */ org.apache.hadoop.thirdparty.protobuf.ByteString getPartitionNameBytes(); /** * optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2; * @return Whether the queueConfigurations field is set. */ boolean hasQueueConfigurations(); /** * optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2; * @return The queueConfigurations. */ org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto getQueueConfigurations(); /** * optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProtoOrBuilder getQueueConfigurationsOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.QueueConfigurationsMapProto} */ public static final class QueueConfigurationsMapProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.QueueConfigurationsMapProto) QueueConfigurationsMapProtoOrBuilder { private static final long serialVersionUID = 0L; // Use QueueConfigurationsMapProto.newBuilder() to construct. private QueueConfigurationsMapProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private QueueConfigurationsMapProto() { partitionName_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new QueueConfigurationsMapProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder.class); } private int bitField0_; public static final int PARTITIONNAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object partitionName_ = ""; /** * required string partitionName = 1; * @return Whether the partitionName field is set. */ @java.lang.Override public boolean hasPartitionName() { return ((bitField0_ & 0x00000001) != 0); } /** * required string partitionName = 1; * @return The partitionName. 
*/ @java.lang.Override public java.lang.String getPartitionName() { java.lang.Object ref = partitionName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { partitionName_ = s; } return s; } } /** * required string partitionName = 1; * @return The bytes for partitionName. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getPartitionNameBytes() { java.lang.Object ref = partitionName_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); partitionName_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int QUEUECONFIGURATIONS_FIELD_NUMBER = 2; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto queueConfigurations_; /** * optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2; * @return Whether the queueConfigurations field is set. */ @java.lang.Override public boolean hasQueueConfigurations() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2; * @return The queueConfigurations. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto getQueueConfigurations() { return queueConfigurations_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.getDefaultInstance() : queueConfigurations_; } /** * optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProtoOrBuilder getQueueConfigurationsOrBuilder() { return queueConfigurations_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.getDefaultInstance() : queueConfigurations_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasPartitionName()) { memoizedIsInitialized = 0; return false; } if (hasQueueConfigurations()) { if (!getQueueConfigurations().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, partitionName_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getQueueConfigurations()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, partitionName_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getQueueConfigurations()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto) obj; if (hasPartitionName() != other.hasPartitionName()) return false; if (hasPartitionName()) { if (!getPartitionName() .equals(other.getPartitionName())) return false; } if (hasQueueConfigurations() != other.hasQueueConfigurations()) return false; if (hasQueueConfigurations()) { if (!getQueueConfigurations() .equals(other.getQueueConfigurations())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasPartitionName()) { hash = (37 * hash) + PARTITIONNAME_FIELD_NUMBER; hash = (53 * hash) + getPartitionName().hashCode(); } if (hasQueueConfigurations()) { hash = (37 * hash) + QUEUECONFIGURATIONS_FIELD_NUMBER; hash = (53 * hash) + getQueueConfigurations().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.QueueConfigurationsMapProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.QueueConfigurationsMapProto) org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getQueueConfigurationsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; partitionName_ = ""; queueConfigurations_ = null; if (queueConfigurationsBuilder_ != null) { queueConfigurationsBuilder_.dispose(); queueConfigurationsBuilder_ = null; } return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsMapProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto build() { org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } 
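/*
 * Illustrative usage sketch (not part of the protoc-generated output): building a
 * QueueConfigurationsMapProto entry with the accessors generated above. The partition
 * name "default" is an assumed example value; build() throws an unchecked
 * UninitializedMessageException if the required partitionName field is unset.
 *
 *   QueueConfigurationsMapProto entry = QueueConfigurationsMapProto.newBuilder()
 *       .setPartitionName("default")
 *       .setQueueConfigurations(
 *           QueueConfigurationsProto.getDefaultInstance())
 *       .build();
 */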
private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.partitionName_ = partitionName_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.queueConfigurations_ = queueConfigurationsBuilder_ == null ? queueConfigurations_ : queueConfigurationsBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.getDefaultInstance()) return this; if (other.hasPartitionName()) { partitionName_ = other.partitionName_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasQueueConfigurations()) { mergeQueueConfigurations(other.getQueueConfigurations()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasPartitionName()) { return false; } if (hasQueueConfigurations()) { if (!getQueueConfigurations().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { partitionName_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getQueueConfigurationsFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch 
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object partitionName_ = ""; /** * required string partitionName = 1; * @return Whether the partitionName field is set. */ public boolean hasPartitionName() { return ((bitField0_ & 0x00000001) != 0); } /** * required string partitionName = 1; * @return The partitionName. */ public java.lang.String getPartitionName() { java.lang.Object ref = partitionName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { partitionName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * required string partitionName = 1; * @return The bytes for partitionName. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getPartitionNameBytes() { java.lang.Object ref = partitionName_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); partitionName_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * required string partitionName = 1; * @param value The partitionName to set. * @return This builder for chaining. */ public Builder setPartitionName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } partitionName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * required string partitionName = 1; * @return This builder for chaining. */ public Builder clearPartitionName() { partitionName_ = getDefaultInstance().getPartitionName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * required string partitionName = 1; * @param value The bytes for partitionName to set. * @return This builder for chaining. */ public Builder setPartitionNameBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } partitionName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto queueConfigurations_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProtoOrBuilder> queueConfigurationsBuilder_; /** * optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2; * @return Whether the queueConfigurations field is set. */ public boolean hasQueueConfigurations() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2; * @return The queueConfigurations. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto getQueueConfigurations() { if (queueConfigurationsBuilder_ == null) { return queueConfigurations_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.getDefaultInstance() : queueConfigurations_; } else { return queueConfigurationsBuilder_.getMessage(); } } /** * optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2; */ public Builder setQueueConfigurations(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto value) { if (queueConfigurationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } queueConfigurations_ = value; } else { queueConfigurationsBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2; */ public Builder setQueueConfigurations( org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.Builder builderForValue) { if (queueConfigurationsBuilder_ == null) { queueConfigurations_ = builderForValue.build(); } else { queueConfigurationsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2; */ public Builder mergeQueueConfigurations(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto value) { if (queueConfigurationsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && queueConfigurations_ != null && queueConfigurations_ != org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.getDefaultInstance()) { getQueueConfigurationsBuilder().mergeFrom(value); } else { queueConfigurations_ = value; } } else { queueConfigurationsBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2; */ public Builder clearQueueConfigurations() { bitField0_ = (bitField0_ & ~0x00000002); queueConfigurations_ = null; if (queueConfigurationsBuilder_ != null) { queueConfigurationsBuilder_.dispose(); queueConfigurationsBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.Builder getQueueConfigurationsBuilder() { bitField0_ |= 0x00000002; onChanged(); return getQueueConfigurationsFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProtoOrBuilder getQueueConfigurationsOrBuilder() { if (queueConfigurationsBuilder_ != null) { return queueConfigurationsBuilder_.getMessageOrBuilder(); } else { return queueConfigurations_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.getDefaultInstance() : queueConfigurations_; } } /** * optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProtoOrBuilder> getQueueConfigurationsFieldBuilder() { if (queueConfigurationsBuilder_ == null) { queueConfigurationsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProtoOrBuilder>( getQueueConfigurations(), getParentForChildren(), isClean()); queueConfigurations_ = null; } return queueConfigurationsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.QueueConfigurationsMapProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.QueueConfigurationsMapProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public QueueConfigurationsMapProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } 
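/*
 * Illustrative round-trip sketch (not part of the protoc-generated output): serializing
 * and re-parsing a QueueConfigurationsMapProto via the parseFrom overloads generated
 * above; toByteArray() comes from the protobuf runtime's AbstractMessageLite base class.
 *
 *   byte[] wire = entry.toByteArray();  // 'entry' as built in the sketch above
 *   QueueConfigurationsMapProto parsed =
 *       QueueConfigurationsMapProto.parseFrom(wire);
 *   // parseFrom throws InvalidProtocolBufferException if the required
 *   // partitionName field is missing from the wire data (see PARSER above,
 *   // which converts UninitializedMessageException accordingly).
 *   assert parsed.hasPartitionName();
 */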
public interface QueueUserACLInfoProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.QueueUserACLInfoProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional string queueName = 1; * @return Whether the queueName field is set. */ boolean hasQueueName(); /** * optional string queueName = 1; * @return The queueName. */ java.lang.String getQueueName(); /** * optional string queueName = 1; * @return The bytes for queueName. */ org.apache.hadoop.thirdparty.protobuf.ByteString getQueueNameBytes(); /** * repeated .hadoop.yarn.QueueACLProto userAcls = 2; * @return A list containing the userAcls. */ java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto> getUserAclsList(); /** * repeated .hadoop.yarn.QueueACLProto userAcls = 2; * @return The count of userAcls. */ int getUserAclsCount(); /** * repeated .hadoop.yarn.QueueACLProto userAcls = 2; * @param index The index of the element to return. * @return The userAcls at the given index. */ org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto getUserAcls(int index); } /** * Protobuf type {@code hadoop.yarn.QueueUserACLInfoProto} */ public static final class QueueUserACLInfoProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.QueueUserACLInfoProto) QueueUserACLInfoProtoOrBuilder { private static final long serialVersionUID = 0L; // Use QueueUserACLInfoProto.newBuilder() to construct. private QueueUserACLInfoProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private QueueUserACLInfoProto() { queueName_ = ""; userAcls_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new QueueUserACLInfoProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueUserACLInfoProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueUserACLInfoProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder.class); } private int bitField0_; public static final int QUEUENAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object queueName_ = ""; /** * optional string queueName = 1; * @return Whether the queueName field is set. */ @java.lang.Override public boolean hasQueueName() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string queueName = 1; * @return The queueName.
*/ @java.lang.Override public java.lang.String getQueueName() { java.lang.Object ref = queueName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { queueName_ = s; } return s; } } /** * optional string queueName = 1; * @return The bytes for queueName. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getQueueNameBytes() { java.lang.Object ref = queueName_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); queueName_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int USERACLS_FIELD_NUMBER = 2; @SuppressWarnings("serial") private java.util.List<java.lang.Integer> userAcls_; private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter.Converter< java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto> userAcls_converter_ = new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter.Converter< java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto>() { public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto convert(java.lang.Integer from) { org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto result = org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto.forNumber(from); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto.QACL_SUBMIT_APPLICATIONS : result; } }; /** * repeated .hadoop.yarn.QueueACLProto userAcls = 2; * @return A list containing the userAcls. */ @java.lang.Override public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto> getUserAclsList() { return new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter< java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto>(userAcls_, userAcls_converter_); } /** * repeated .hadoop.yarn.QueueACLProto userAcls = 2; * @return The count of userAcls. */ @java.lang.Override public int getUserAclsCount() { return userAcls_.size(); } /** * repeated .hadoop.yarn.QueueACLProto userAcls = 2; * @param index The index of the element to return. * @return The userAcls at the given index.
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto getUserAcls(int index) { return userAcls_converter_.convert(userAcls_.get(index)); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, queueName_); } for (int i = 0; i < userAcls_.size(); i++) { output.writeEnum(2, userAcls_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, queueName_); } { int dataSize = 0; for (int i = 0; i < userAcls_.size(); i++) { dataSize += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSizeNoTag(userAcls_.get(i)); } size += dataSize; size += 1 * userAcls_.size(); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto) obj; if (hasQueueName() != other.hasQueueName()) return false; if (hasQueueName()) { if (!getQueueName() .equals(other.getQueueName())) return false; } if (!userAcls_.equals(other.userAcls_)) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasQueueName()) { hash = (37 * hash) + QUEUENAME_FIELD_NUMBER; hash = (53 * hash) + getQueueName().hashCode(); } if (getUserAclsCount() > 0) { hash = (37 * hash) + USERACLS_FIELD_NUMBER; hash = (53 * hash) + userAcls_.hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.QueueUserACLInfoProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.QueueUserACLInfoProto) org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueUserACLInfoProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueUserACLInfoProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; queueName_ = ""; userAcls_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueUserACLInfoProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto build() { org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto result = new org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto result) { if (((bitField0_ & 0x00000002) != 0)) { userAcls_ = java.util.Collections.unmodifiableList(userAcls_); bitField0_ = (bitField0_ & ~0x00000002); } result.userAcls_ = userAcls_; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.queueName_ = queueName_; to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override 
public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.getDefaultInstance()) return this; if (other.hasQueueName()) { queueName_ = other.queueName_; bitField0_ |= 0x00000001; onChanged(); } if (!other.userAcls_.isEmpty()) { if (userAcls_.isEmpty()) { userAcls_ = other.userAcls_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureUserAclsIsMutable(); userAcls_.addAll(other.userAcls_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { queueName_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(2, tmpRaw); } else { ensureUserAclsIsMutable(); userAcls_.add(tmpRaw); } break; } // case 16 case 18: { int length = input.readRawVarint32(); int oldLimit = input.pushLimit(length); while(input.getBytesUntilLimit() > 0) { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(2, tmpRaw); } else { ensureUserAclsIsMutable(); userAcls_.add(tmpRaw); } } input.popLimit(oldLimit); break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch 
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object queueName_ = ""; /** * optional string queueName = 1; * @return Whether the queueName field is set. */ public boolean hasQueueName() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string queueName = 1; * @return The queueName. */ public java.lang.String getQueueName() { java.lang.Object ref = queueName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { queueName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string queueName = 1; * @return The bytes for queueName. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getQueueNameBytes() { java.lang.Object ref = queueName_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); queueName_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string queueName = 1; * @param value The queueName to set. * @return This builder for chaining. */ public Builder setQueueName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } queueName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional string queueName = 1; * @return This builder for chaining. */ public Builder clearQueueName() { queueName_ = getDefaultInstance().getQueueName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * optional string queueName = 1; * @param value The bytes for queueName to set. * @return This builder for chaining. */ public Builder setQueueNameBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } queueName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.util.List<java.lang.Integer> userAcls_ = java.util.Collections.emptyList(); private void ensureUserAclsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { userAcls_ = new java.util.ArrayList<java.lang.Integer>(userAcls_); bitField0_ |= 0x00000002; } } /** * repeated .hadoop.yarn.QueueACLProto userAcls = 2; * @return A list containing the userAcls. */ public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto> getUserAclsList() { return new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter< java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto>(userAcls_, userAcls_converter_); } /** * repeated .hadoop.yarn.QueueACLProto userAcls = 2; * @return The count of userAcls. */ public int getUserAclsCount() { return userAcls_.size(); } /** * repeated .hadoop.yarn.QueueACLProto userAcls = 2; * @param index The index of the element to return. * @return The userAcls at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto getUserAcls(int index) { return userAcls_converter_.convert(userAcls_.get(index)); } /** * repeated .hadoop.yarn.QueueACLProto userAcls = 2; * @param index The index to set the value at. * @param value The userAcls to set. * @return This builder for chaining.
*/ public Builder setUserAcls( int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto value) { if (value == null) { throw new NullPointerException(); } ensureUserAclsIsMutable(); userAcls_.set(index, value.getNumber()); onChanged(); return this; } /** * repeated .hadoop.yarn.QueueACLProto userAcls = 2; * @param value The userAcls to add. * @return This builder for chaining. */ public Builder addUserAcls(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto value) { if (value == null) { throw new NullPointerException(); } ensureUserAclsIsMutable(); userAcls_.add(value.getNumber()); onChanged(); return this; } /** * repeated .hadoop.yarn.QueueACLProto userAcls = 2; * @param values The userAcls to add. * @return This builder for chaining. */ public Builder addAllUserAcls( java.lang.Iterable values) { ensureUserAclsIsMutable(); for (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto value : values) { userAcls_.add(value.getNumber()); } onChanged(); return this; } /** * repeated .hadoop.yarn.QueueACLProto userAcls = 2; * @return This builder for chaining. */ public Builder clearUserAcls() { userAcls_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.QueueUserACLInfoProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.QueueUserACLInfoProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public QueueUserACLInfoProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser 
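/*
 * The deprecated PARSER constant above is kept for compatibility; new code
 * should prefer the parser() accessor. Both routes delegate to
 * Builder.mergeFrom(CodedInputStream, ...) and attach any partially read
 * message to a thrown InvalidProtocolBufferException. A usage sketch,
 * assuming the unshaded class names used elsewhere in this file:
 *
 *   org.apache.hadoop.thirdparty.protobuf.Parser<QueueUserACLInfoProto> p =
 *       QueueUserACLInfoProto.parser();
 *   QueueUserACLInfoProto msg = p.parseFrom(bytes); // bytes: caller-supplied byte[]
 */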
getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface PlacementConstraintProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.PlacementConstraintProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1; * @return Whether the simpleConstraint field is set. */ boolean hasSimpleConstraint(); /** * optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1; * @return The simpleConstraint. */ org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto getSimpleConstraint(); /** * optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProtoOrBuilder getSimpleConstraintOrBuilder(); /** * optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2; * @return Whether the compositeConstraint field is set. */ boolean hasCompositeConstraint(); /** * optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2; * @return The compositeConstraint. */ org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto getCompositeConstraint(); /** * optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProtoOrBuilder getCompositeConstraintOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.PlacementConstraintProto} */ public static final class PlacementConstraintProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.PlacementConstraintProto) PlacementConstraintProtoOrBuilder { private static final long serialVersionUID = 0L; // Use PlacementConstraintProto.newBuilder() to construct. 
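/*
 * PlacementConstraintProto is effectively a tagged union expressed as two
 * optional fields: simpleConstraint (field 1) and compositeConstraint
 * (field 2). The schema does not enforce mutual exclusion, so producers
 * conventionally set exactly one. A minimal construction sketch, where
 * `simple` stands for a SimplePlacementConstraintProto built elsewhere:
 *
 *   PlacementConstraintProto pc = PlacementConstraintProto.newBuilder()
 *       .setSimpleConstraint(simple)
 *       .build();
 *   boolean isSimple = pc.hasSimpleConstraint(); // true here
 */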
private PlacementConstraintProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private PlacementConstraintProto() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new PlacementConstraintProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder.class); } private int bitField0_; public static final int SIMPLECONSTRAINT_FIELD_NUMBER = 1; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto simpleConstraint_; /** * optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1; * @return Whether the simpleConstraint field is set. */ @java.lang.Override public boolean hasSimpleConstraint() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1; * @return The simpleConstraint. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto getSimpleConstraint() { return simpleConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.getDefaultInstance() : simpleConstraint_; } /** * optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProtoOrBuilder getSimpleConstraintOrBuilder() { return simpleConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.getDefaultInstance() : simpleConstraint_; } public static final int COMPOSITECONSTRAINT_FIELD_NUMBER = 2; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto compositeConstraint_; /** * optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2; * @return Whether the compositeConstraint field is set. */ @java.lang.Override public boolean hasCompositeConstraint() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2; * @return The compositeConstraint. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto getCompositeConstraint() { return compositeConstraint_ == null ? 
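/*
 * Note that getSimpleConstraint() above and getCompositeConstraint() here
 * never return null: an unset field yields the type's default instance.
 * Callers that must distinguish "unset" from "default" should gate on the
 * has-method, e.g. (handleComposite is a caller-supplied method):
 *
 *   if (pc.hasCompositeConstraint()) {
 *     handleComposite(pc.getCompositeConstraint());
 *   }
 */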
org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.getDefaultInstance() : compositeConstraint_; } /** * optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProtoOrBuilder getCompositeConstraintOrBuilder() { return compositeConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.getDefaultInstance() : compositeConstraint_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasSimpleConstraint()) { if (!getSimpleConstraint().isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasCompositeConstraint()) { if (!getCompositeConstraint().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getSimpleConstraint()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getCompositeConstraint()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getSimpleConstraint()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getCompositeConstraint()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto) obj; if (hasSimpleConstraint() != other.hasSimpleConstraint()) return false; if (hasSimpleConstraint()) { if (!getSimpleConstraint() .equals(other.getSimpleConstraint())) return false; } if (hasCompositeConstraint() != other.hasCompositeConstraint()) return false; if (hasCompositeConstraint()) { if (!getCompositeConstraint() .equals(other.getCompositeConstraint())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasSimpleConstraint()) { hash = (37 * hash) + SIMPLECONSTRAINT_FIELD_NUMBER; hash = (53 * hash) + getSimpleConstraint().hashCode(); } if (hasCompositeConstraint()) { hash = (37 * hash) + COMPOSITECONSTRAINT_FIELD_NUMBER; hash = (53 * hash) + getCompositeConstraint().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { 
return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
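/*
 * Each parseFrom(...) overload above reads exactly one message, while the
 * parseDelimitedFrom(...) variants first read a varint length prefix so that
 * several messages can share one stream. A sketch, with `out` and `in`
 * assumed to be caller-supplied OutputStream/InputStream:
 *
 *   pc.writeDelimitedTo(out);                            // writer side
 *   PlacementConstraintProto next =
 *       PlacementConstraintProto.parseDelimitedFrom(in); // reader side
 */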
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.PlacementConstraintProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.PlacementConstraintProto) org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getSimpleConstraintFieldBuilder(); getCompositeConstraintFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; simpleConstraint_ = null; if (simpleConstraintBuilder_ != null) { simpleConstraintBuilder_.dispose(); simpleConstraintBuilder_ = null; } compositeConstraint_ = null; if (compositeConstraintBuilder_ != null) { compositeConstraintBuilder_.dispose(); compositeConstraintBuilder_ = null; } return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto build() { org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto result = buildPartial(); if (!result.isInitialized()) { throw 
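/*
 * This is the build() contract: it throws when isInitialized() fails, which
 * for PlacementConstraintProto means a set sub-constraint is itself missing
 * required fields. buildPartial(), by contrast, returns the message as-is:
 *
 *   PlacementConstraintProto maybeIncomplete =
 *       PlacementConstraintProto.newBuilder().buildPartial(); // never throws
 */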
newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto result = new org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.simpleConstraint_ = simpleConstraintBuilder_ == null ? simpleConstraint_ : simpleConstraintBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.compositeConstraint_ = compositeConstraintBuilder_ == null ? compositeConstraint_ : compositeConstraintBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance()) return this; if (other.hasSimpleConstraint()) { mergeSimpleConstraint(other.getSimpleConstraint()); } if (other.hasCompositeConstraint()) { mergeCompositeConstraint(other.getCompositeConstraint()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasSimpleConstraint()) { if (!getSimpleConstraint().isInitialized()) { return false; } } if (hasCompositeConstraint()) { if (!getCompositeConstraint().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 
0: done = true; break; case 10: { input.readMessage( getSimpleConstraintFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getCompositeConstraintFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto simpleConstraint_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProtoOrBuilder> simpleConstraintBuilder_; /** * optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1; * @return Whether the simpleConstraint field is set. */ public boolean hasSimpleConstraint() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1; * @return The simpleConstraint. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto getSimpleConstraint() { if (simpleConstraintBuilder_ == null) { return simpleConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.getDefaultInstance() : simpleConstraint_; } else { return simpleConstraintBuilder_.getMessage(); } } /** * optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1; */ public Builder setSimpleConstraint(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto value) { if (simpleConstraintBuilder_ == null) { if (value == null) { throw new NullPointerException(); } simpleConstraint_ = value; } else { simpleConstraintBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1; */ public Builder setSimpleConstraint( org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.Builder builderForValue) { if (simpleConstraintBuilder_ == null) { simpleConstraint_ = builderForValue.build(); } else { simpleConstraintBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1; */ public Builder mergeSimpleConstraint(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto value) { if (simpleConstraintBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && simpleConstraint_ != null && simpleConstraint_ != org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.getDefaultInstance()) { getSimpleConstraintBuilder().mergeFrom(value); } else { simpleConstraint_ = value; } } else { simpleConstraintBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1; */ public Builder clearSimpleConstraint() { bitField0_ = 
(bitField0_ & ~0x00000001); simpleConstraint_ = null; if (simpleConstraintBuilder_ != null) { simpleConstraintBuilder_.dispose(); simpleConstraintBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.Builder getSimpleConstraintBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSimpleConstraintFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProtoOrBuilder getSimpleConstraintOrBuilder() { if (simpleConstraintBuilder_ != null) { return simpleConstraintBuilder_.getMessageOrBuilder(); } else { return simpleConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.getDefaultInstance() : simpleConstraint_; } } /** * optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProtoOrBuilder> getSimpleConstraintFieldBuilder() { if (simpleConstraintBuilder_ == null) { simpleConstraintBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProtoOrBuilder>( getSimpleConstraint(), getParentForChildren(), isClean()); simpleConstraint_ = null; } return simpleConstraintBuilder_; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto compositeConstraint_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProtoOrBuilder> compositeConstraintBuilder_; /** * optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2; * @return Whether the compositeConstraint field is set. */ public boolean hasCompositeConstraint() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2; * @return The compositeConstraint. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto getCompositeConstraint() { if (compositeConstraintBuilder_ == null) { return compositeConstraint_ == null ? 
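/*
 * getSimpleConstraintFieldBuilder() above creates its SingleFieldBuilderV3
 * lazily and hands the current value over to it, after which the builder
 * object is the single source of truth. Practically, this lets callers edit
 * the nested message in place ("node" is just an illustrative scope value;
 * setScope appears later in this file):
 *
 *   PlacementConstraintProto.Builder b = PlacementConstraintProto.newBuilder();
 *   b.getSimpleConstraintBuilder().setScope("node");
 *   PlacementConstraintProto pc = b.build();
 */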
org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.getDefaultInstance() : compositeConstraint_; } else { return compositeConstraintBuilder_.getMessage(); } } /** * optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2; */ public Builder setCompositeConstraint(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto value) { if (compositeConstraintBuilder_ == null) { if (value == null) { throw new NullPointerException(); } compositeConstraint_ = value; } else { compositeConstraintBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2; */ public Builder setCompositeConstraint( org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.Builder builderForValue) { if (compositeConstraintBuilder_ == null) { compositeConstraint_ = builderForValue.build(); } else { compositeConstraintBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2; */ public Builder mergeCompositeConstraint(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto value) { if (compositeConstraintBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && compositeConstraint_ != null && compositeConstraint_ != org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.getDefaultInstance()) { getCompositeConstraintBuilder().mergeFrom(value); } else { compositeConstraint_ = value; } } else { compositeConstraintBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2; */ public Builder clearCompositeConstraint() { bitField0_ = (bitField0_ & ~0x00000002); compositeConstraint_ = null; if (compositeConstraintBuilder_ != null) { compositeConstraintBuilder_.dispose(); compositeConstraintBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.Builder getCompositeConstraintBuilder() { bitField0_ |= 0x00000002; onChanged(); return getCompositeConstraintFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProtoOrBuilder getCompositeConstraintOrBuilder() { if (compositeConstraintBuilder_ != null) { return compositeConstraintBuilder_.getMessageOrBuilder(); } else { return compositeConstraint_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.getDefaultInstance() : compositeConstraint_; } } /** * optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProtoOrBuilder> getCompositeConstraintFieldBuilder() { if (compositeConstraintBuilder_ == null) { compositeConstraintBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProtoOrBuilder>( getCompositeConstraint(), getParentForChildren(), isClean()); compositeConstraint_ = null; } return compositeConstraintBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.PlacementConstraintProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.PlacementConstraintProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public PlacementConstraintProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto 
  getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

  }

  public interface SimplePlacementConstraintProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.SimplePlacementConstraintProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * required string scope = 1;
     * @return Whether the scope field is set.
     */
    boolean hasScope();
    /**
     * required string scope = 1;
     * @return The scope.
     */
    java.lang.String getScope();
    /**
     * required string scope = 1;
     * @return The bytes for scope.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getScopeBytes();

    /**
     * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto>
        getTargetExpressionsList();
    /**
     * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto getTargetExpressions(int index);
    /**
     * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;
     */
    int getTargetExpressionsCount();
    /**
     * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder>
        getTargetExpressionsOrBuilderList();
    /**
     * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder getTargetExpressionsOrBuilder(
        int index);

    /**
     * optional int32 minCardinality = 3;
     * @return Whether the minCardinality field is set.
     */
    boolean hasMinCardinality();
    /**
     * optional int32 minCardinality = 3;
     * @return The minCardinality.
     */
    int getMinCardinality();

    /**
     * optional int32 maxCardinality = 4;
     * @return Whether the maxCardinality field is set.
     */
    boolean hasMaxCardinality();
    /**
     * optional int32 maxCardinality = 4;
     * @return The maxCardinality.
     */
    int getMaxCardinality();

    /**
     * optional .hadoop.yarn.NodeAttributeOpCodeProto attributeOpCode = 5;
     * @return Whether the attributeOpCode field is set.
     */
    boolean hasAttributeOpCode();
    /**
     * optional .hadoop.yarn.NodeAttributeOpCodeProto attributeOpCode = 5;
     * @return The attributeOpCode.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto getAttributeOpCode();
  }
  /**
   * Protobuf type {@code hadoop.yarn.SimplePlacementConstraintProto}
   */
  public static final class SimplePlacementConstraintProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.SimplePlacementConstraintProto)
      SimplePlacementConstraintProtoOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use SimplePlacementConstraintProto.newBuilder() to construct.
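/*
 * Reconstructed shape of this message, inferred from the accessors declared
 * above (a sketch, not the authoritative yarn_protos.proto):
 *
 *   message SimplePlacementConstraintProto {
 *     required string scope = 1;
 *     repeated PlacementConstraintTargetProto targetExpressions = 2;
 *     optional int32 minCardinality = 3;
 *     optional int32 maxCardinality = 4;
 *     optional NodeAttributeOpCodeProto attributeOpCode = 5;
 *   }
 */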
private SimplePlacementConstraintProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private SimplePlacementConstraintProto() { scope_ = ""; targetExpressions_ = java.util.Collections.emptyList(); attributeOpCode_ = 1; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new SimplePlacementConstraintProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SimplePlacementConstraintProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SimplePlacementConstraintProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.class, org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.Builder.class); } private int bitField0_; public static final int SCOPE_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object scope_ = ""; /** * required string scope = 1; * @return Whether the scope field is set. */ @java.lang.Override public boolean hasScope() { return ((bitField0_ & 0x00000001) != 0); } /** * required string scope = 1; * @return The scope. */ @java.lang.Override public java.lang.String getScope() { java.lang.Object ref = scope_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { scope_ = s; } return s; } } /** * required string scope = 1; * @return The bytes for scope. 
   */
  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.ByteString
      getScopeBytes() {
    java.lang.Object ref = scope_;
    if (ref instanceof java.lang.String) {
      org.apache.hadoop.thirdparty.protobuf.ByteString b =
          org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      scope_ = b;
      return b;
    } else {
      return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
    }
  }

  public static final int TARGETEXPRESSIONS_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto> targetExpressions_;
  /**
   * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;
   */
  @java.lang.Override
  public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto> getTargetExpressionsList() {
    return targetExpressions_;
  }
  /**
   * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;
   */
  @java.lang.Override
  public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder>
      getTargetExpressionsOrBuilderList() {
    return targetExpressions_;
  }
  /**
   * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;
   */
  @java.lang.Override
  public int getTargetExpressionsCount() {
    return targetExpressions_.size();
  }
  /**
   * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto getTargetExpressions(int index) {
    return targetExpressions_.get(index);
  }
  /**
   * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder getTargetExpressionsOrBuilder(
      int index) {
    return targetExpressions_.get(index);
  }

  public static final int MINCARDINALITY_FIELD_NUMBER = 3;
  private int minCardinality_ = 0;
  /**
   * optional int32 minCardinality = 3;
   * @return Whether the minCardinality field is set.
   */
  @java.lang.Override
  public boolean hasMinCardinality() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   * optional int32 minCardinality = 3;
   * @return The minCardinality.
   */
  @java.lang.Override
  public int getMinCardinality() {
    return minCardinality_;
  }

  public static final int MAXCARDINALITY_FIELD_NUMBER = 4;
  private int maxCardinality_ = 0;
  /**
   * optional int32 maxCardinality = 4;
   * @return Whether the maxCardinality field is set.
   */
  @java.lang.Override
  public boolean hasMaxCardinality() {
    return ((bitField0_ & 0x00000004) != 0);
  }
  /**
   * optional int32 maxCardinality = 4;
   * @return The maxCardinality.
   */
  @java.lang.Override
  public int getMaxCardinality() {
    return maxCardinality_;
  }

  public static final int ATTRIBUTEOPCODE_FIELD_NUMBER = 5;
  private int attributeOpCode_ = 1;
  /**
   * optional .hadoop.yarn.NodeAttributeOpCodeProto attributeOpCode = 5;
   * @return Whether the attributeOpCode field is set.
   */
  @java.lang.Override
  public boolean hasAttributeOpCode() {
    return ((bitField0_ & 0x00000008) != 0);
  }
  /**
   * optional .hadoop.yarn.NodeAttributeOpCodeProto attributeOpCode = 5;
   * @return The attributeOpCode.
   */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto getAttributeOpCode() {
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto.forNumber(attributeOpCode_);
    return result == null ?
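/*
 * Enum fields are stored as raw ints; the getter maps the int back through
 * forNumber(...) and falls back to NO_OP when the stored number is unknown
 * to this schema version, so callers never see null:
 *
 *   NodeAttributeOpCodeProto op = msg.hasAttributeOpCode()
 *       ? msg.getAttributeOpCode()
 *       : NodeAttributeOpCodeProto.NO_OP; // spell out the default for clarity
 */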
org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto.NO_OP : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasScope()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getTargetExpressionsCount(); i++) { if (!getTargetExpressions(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, scope_); } for (int i = 0; i < targetExpressions_.size(); i++) { output.writeMessage(2, targetExpressions_.get(i)); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt32(3, minCardinality_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeInt32(4, maxCardinality_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeEnum(5, attributeOpCode_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, scope_); } for (int i = 0; i < targetExpressions_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, targetExpressions_.get(i)); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(3, minCardinality_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(4, maxCardinality_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(5, attributeOpCode_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto) obj; if (hasScope() != other.hasScope()) return false; if (hasScope()) { if (!getScope() .equals(other.getScope())) return false; } if (!getTargetExpressionsList() .equals(other.getTargetExpressionsList())) return false; if (hasMinCardinality() != other.hasMinCardinality()) return false; if (hasMinCardinality()) { if (getMinCardinality() != other.getMinCardinality()) return false; } if (hasMaxCardinality() != other.hasMaxCardinality()) return false; if (hasMaxCardinality()) { if (getMaxCardinality() != other.getMaxCardinality()) return false; } if (hasAttributeOpCode() != other.hasAttributeOpCode()) return false; if (hasAttributeOpCode()) { if (attributeOpCode_ != other.attributeOpCode_) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if 
(hasScope()) { hash = (37 * hash) + SCOPE_FIELD_NUMBER; hash = (53 * hash) + getScope().hashCode(); } if (getTargetExpressionsCount() > 0) { hash = (37 * hash) + TARGETEXPRESSIONS_FIELD_NUMBER; hash = (53 * hash) + getTargetExpressionsList().hashCode(); } if (hasMinCardinality()) { hash = (37 * hash) + MINCARDINALITY_FIELD_NUMBER; hash = (53 * hash) + getMinCardinality(); } if (hasMaxCardinality()) { hash = (37 * hash) + MAXCARDINALITY_FIELD_NUMBER; hash = (53 * hash) + getMaxCardinality(); } if (hasAttributeOpCode()) { hash = (37 * hash) + ATTRIBUTEOPCODE_FIELD_NUMBER; hash = (53 * hash) + attributeOpCode_; } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.SimplePlacementConstraintProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.SimplePlacementConstraintProto) org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SimplePlacementConstraintProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SimplePlacementConstraintProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.class, org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; scope_ = ""; if (targetExpressionsBuilder_ == null) { targetExpressions_ = java.util.Collections.emptyList(); } else { targetExpressions_ = null; targetExpressionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); minCardinality_ = 0; maxCardinality_ = 0; attributeOpCode_ = 1; return this; } @java.lang.Override public 
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SimplePlacementConstraintProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto build() { org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto result = new org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto result) { if (targetExpressionsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { targetExpressions_ = java.util.Collections.unmodifiableList(targetExpressions_); bitField0_ = (bitField0_ & ~0x00000002); } result.targetExpressions_ = targetExpressions_; } else { result.targetExpressions_ = targetExpressionsBuilder_.build(); } } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.scope_ = scope_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.minCardinality_ = minCardinality_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000008) != 0)) { result.maxCardinality_ = maxCardinality_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000010) != 0)) { result.attributeOpCode_ = attributeOpCode_; to_bitField0_ |= 0x00000008; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof 
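/*
 * In buildPartial0(...) above, the builder's presence bits are renumbered on
 * the way out (0x4 -> 0x2, 0x8 -> 0x4, 0x10 -> 0x8) because builder bit 0x2
 * tracks mutability of the repeated targetExpressions_ list -- bookkeeping
 * the immutable message does not need, so the message's bitField0_ packs
 * scope, minCardinality, maxCardinality and attributeOpCode contiguously.
 */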
org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.getDefaultInstance()) return this; if (other.hasScope()) { scope_ = other.scope_; bitField0_ |= 0x00000001; onChanged(); } if (targetExpressionsBuilder_ == null) { if (!other.targetExpressions_.isEmpty()) { if (targetExpressions_.isEmpty()) { targetExpressions_ = other.targetExpressions_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureTargetExpressionsIsMutable(); targetExpressions_.addAll(other.targetExpressions_); } onChanged(); } } else { if (!other.targetExpressions_.isEmpty()) { if (targetExpressionsBuilder_.isEmpty()) { targetExpressionsBuilder_.dispose(); targetExpressionsBuilder_ = null; targetExpressions_ = other.targetExpressions_; bitField0_ = (bitField0_ & ~0x00000002); targetExpressionsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getTargetExpressionsFieldBuilder() : null; } else { targetExpressionsBuilder_.addAllMessages(other.targetExpressions_); } } } if (other.hasMinCardinality()) { setMinCardinality(other.getMinCardinality()); } if (other.hasMaxCardinality()) { setMaxCardinality(other.getMaxCardinality()); } if (other.hasAttributeOpCode()) { setAttributeOpCode(other.getAttributeOpCode()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasScope()) { return false; } for (int i = 0; i < getTargetExpressionsCount(); i++) { if (!getTargetExpressions(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { scope_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.PARSER, extensionRegistry); if (targetExpressionsBuilder_ == null) { ensureTargetExpressionsIsMutable(); targetExpressions_.add(m); } else { targetExpressionsBuilder_.addMessage(m); } break; } // case 18 case 24: { minCardinality_ = input.readInt32(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { maxCardinality_ = input.readInt32(); bitField0_ |= 0x00000008; break; } // case 32 case 40: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(5, tmpRaw); } else { attributeOpCode_ = tmpRaw; bitField0_ |= 0x00000010; } break; } // case 40 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch 
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object scope_ = ""; /** * required string scope = 1; * @return Whether the scope field is set. */ public boolean hasScope() { return ((bitField0_ & 0x00000001) != 0); } /** * required string scope = 1; * @return The scope. */ public java.lang.String getScope() { java.lang.Object ref = scope_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { scope_ = s; } return s; } else { return (java.lang.String) ref; } } /** * required string scope = 1; * @return The bytes for scope. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getScopeBytes() { java.lang.Object ref = scope_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); scope_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * required string scope = 1; * @param value The scope to set. * @return This builder for chaining. */ public Builder setScope( java.lang.String value) { if (value == null) { throw new NullPointerException(); } scope_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * required string scope = 1; * @return This builder for chaining. */ public Builder clearScope() { scope_ = getDefaultInstance().getScope(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * required string scope = 1; * @param value The bytes for scope to set. * @return This builder for chaining. 
*/ public Builder setScopeBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } scope_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.util.List targetExpressions_ = java.util.Collections.emptyList(); private void ensureTargetExpressionsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { targetExpressions_ = new java.util.ArrayList(targetExpressions_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder> targetExpressionsBuilder_; /** * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public java.util.List getTargetExpressionsList() { if (targetExpressionsBuilder_ == null) { return java.util.Collections.unmodifiableList(targetExpressions_); } else { return targetExpressionsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public int getTargetExpressionsCount() { if (targetExpressionsBuilder_ == null) { return targetExpressions_.size(); } else { return targetExpressionsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto getTargetExpressions(int index) { if (targetExpressionsBuilder_ == null) { return targetExpressions_.get(index); } else { return targetExpressionsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public Builder setTargetExpressions( int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto value) { if (targetExpressionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTargetExpressionsIsMutable(); targetExpressions_.set(index, value); onChanged(); } else { targetExpressionsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public Builder setTargetExpressions( int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder builderForValue) { if (targetExpressionsBuilder_ == null) { ensureTargetExpressionsIsMutable(); targetExpressions_.set(index, builderForValue.build()); onChanged(); } else { targetExpressionsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public Builder addTargetExpressions(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto value) { if (targetExpressionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTargetExpressionsIsMutable(); targetExpressions_.add(value); onChanged(); } else { targetExpressionsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public Builder addTargetExpressions( int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto value) { if (targetExpressionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTargetExpressionsIsMutable(); 
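      // (Editor's note, not generated code.) The ensureTargetExpressionsIsMutable()
      // call just above is the builder's copy-on-write step: while bit 0x00000002 of
      // bitField0_ is clear, targetExpressions_ may still alias an immutable or
      // shared list, so it is first copied into a fresh java.util.ArrayList before
      // the indexed add below mutates it.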
targetExpressions_.add(index, value); onChanged(); } else { targetExpressionsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public Builder addTargetExpressions( org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder builderForValue) { if (targetExpressionsBuilder_ == null) { ensureTargetExpressionsIsMutable(); targetExpressions_.add(builderForValue.build()); onChanged(); } else { targetExpressionsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public Builder addTargetExpressions( int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder builderForValue) { if (targetExpressionsBuilder_ == null) { ensureTargetExpressionsIsMutable(); targetExpressions_.add(index, builderForValue.build()); onChanged(); } else { targetExpressionsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public Builder addAllTargetExpressions( java.lang.Iterable values) { if (targetExpressionsBuilder_ == null) { ensureTargetExpressionsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, targetExpressions_); onChanged(); } else { targetExpressionsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public Builder clearTargetExpressions() { if (targetExpressionsBuilder_ == null) { targetExpressions_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { targetExpressionsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public Builder removeTargetExpressions(int index) { if (targetExpressionsBuilder_ == null) { ensureTargetExpressionsIsMutable(); targetExpressions_.remove(index); onChanged(); } else { targetExpressionsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder getTargetExpressionsBuilder( int index) { return getTargetExpressionsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder getTargetExpressionsOrBuilder( int index) { if (targetExpressionsBuilder_ == null) { return targetExpressions_.get(index); } else { return targetExpressionsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public java.util.List getTargetExpressionsOrBuilderList() { if (targetExpressionsBuilder_ != null) { return targetExpressionsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(targetExpressions_); } } /** * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder addTargetExpressionsBuilder() { return getTargetExpressionsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.getDefaultInstance()); } /** * repeated 
.hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder addTargetExpressionsBuilder( int index) { return getTargetExpressionsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2; */ public java.util.List getTargetExpressionsBuilderList() { return getTargetExpressionsFieldBuilder().getBuilderList(); } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder> getTargetExpressionsFieldBuilder() { if (targetExpressionsBuilder_ == null) { targetExpressionsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder>( targetExpressions_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); targetExpressions_ = null; } return targetExpressionsBuilder_; } private int minCardinality_ ; /** * optional int32 minCardinality = 3; * @return Whether the minCardinality field is set. */ @java.lang.Override public boolean hasMinCardinality() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int32 minCardinality = 3; * @return The minCardinality. */ @java.lang.Override public int getMinCardinality() { return minCardinality_; } /** * optional int32 minCardinality = 3; * @param value The minCardinality to set. * @return This builder for chaining. */ public Builder setMinCardinality(int value) { minCardinality_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional int32 minCardinality = 3; * @return This builder for chaining. */ public Builder clearMinCardinality() { bitField0_ = (bitField0_ & ~0x00000004); minCardinality_ = 0; onChanged(); return this; } private int maxCardinality_ ; /** * optional int32 maxCardinality = 4; * @return Whether the maxCardinality field is set. */ @java.lang.Override public boolean hasMaxCardinality() { return ((bitField0_ & 0x00000008) != 0); } /** * optional int32 maxCardinality = 4; * @return The maxCardinality. */ @java.lang.Override public int getMaxCardinality() { return maxCardinality_; } /** * optional int32 maxCardinality = 4; * @param value The maxCardinality to set. * @return This builder for chaining. */ public Builder setMaxCardinality(int value) { maxCardinality_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional int32 maxCardinality = 4; * @return This builder for chaining. */ public Builder clearMaxCardinality() { bitField0_ = (bitField0_ & ~0x00000008); maxCardinality_ = 0; onChanged(); return this; } private int attributeOpCode_ = 1; /** * optional .hadoop.yarn.NodeAttributeOpCodeProto attributeOpCode = 5; * @return Whether the attributeOpCode field is set. */ @java.lang.Override public boolean hasAttributeOpCode() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.NodeAttributeOpCodeProto attributeOpCode = 5; * @return The attributeOpCode. 
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto getAttributeOpCode() { org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto.forNumber(attributeOpCode_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto.NO_OP : result; } /** * optional .hadoop.yarn.NodeAttributeOpCodeProto attributeOpCode = 5; * @param value The attributeOpCode to set. * @return This builder for chaining. */ public Builder setAttributeOpCode(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000010; attributeOpCode_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.NodeAttributeOpCodeProto attributeOpCode = 5; * @return This builder for chaining. */ public Builder clearAttributeOpCode() { bitField0_ = (bitField0_ & ~0x00000010); attributeOpCode_ = 1; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.SimplePlacementConstraintProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.SimplePlacementConstraintProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public SimplePlacementConstraintProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public 
org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface PlacementConstraintTargetProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.PlacementConstraintTargetProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * required .hadoop.yarn.PlacementConstraintTargetProto.TargetType targetType = 1;
     * @return Whether the targetType field is set.
     */
    boolean hasTargetType();
    /**
     * required .hadoop.yarn.PlacementConstraintTargetProto.TargetType targetType = 1;
     * @return The targetType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType getTargetType();

    /**
     * optional string targetKey = 2;
     * @return Whether the targetKey field is set.
     */
    boolean hasTargetKey();
    /**
     * optional string targetKey = 2;
     * @return The targetKey.
     */
    java.lang.String getTargetKey();
    /**
     * optional string targetKey = 2;
     * @return The bytes for targetKey.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTargetKeyBytes();

    /**
     * repeated string targetValues = 3;
     * @return A list containing the targetValues.
     */
    java.util.List<java.lang.String>
        getTargetValuesList();
    /**
     * repeated string targetValues = 3;
     * @return The count of targetValues.
     */
    int getTargetValuesCount();
    /**
     * repeated string targetValues = 3;
     * @param index The index of the element to return.
     * @return The targetValues at the given index.
     */
    java.lang.String getTargetValues(int index);
    /**
     * repeated string targetValues = 3;
     * @param index The index of the value to return.
     * @return The bytes of the targetValues at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTargetValuesBytes(int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.PlacementConstraintTargetProto}
   */
  public static final class PlacementConstraintTargetProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.PlacementConstraintTargetProto)
      PlacementConstraintTargetProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use PlacementConstraintTargetProto.newBuilder() to construct.
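    // --------------------------------------------------------------------
    // Editor's sketch (not part of the generated source): how the builders
    // shown in this file compose in client code. The scope, key, and value
    // strings are illustrative placeholders, and the delimited round-trip
    // assumes the standard writeDelimitedTo(OutputStream) that generated
    // messages inherit from the protobuf MessageLite API.
    //
    //   YarnProtos.PlacementConstraintTargetProto target =
    //       YarnProtos.PlacementConstraintTargetProto.newBuilder()
    //           .setTargetType(YarnProtos.PlacementConstraintTargetProto
    //               .TargetType.ALLOCATION_TAG)        // required; build() throws if unset
    //           .setTargetKey("tag")                   // optional string targetKey = 2
    //           .addTargetValues("hbase-regionserver") // repeated string targetValues = 3
    //           .build();
    //
    //   YarnProtos.SimplePlacementConstraintProto constraint =
    //       YarnProtos.SimplePlacementConstraintProto.newBuilder()
    //           .setScope("node")             // required string scope = 1
    //           .addTargetExpressions(target) // repeated message field 2
    //           .setMinCardinality(0)         // optional int32 minCardinality = 3
    //           .setMaxCardinality(1)         // optional int32 maxCardinality = 4
    //           .build();
    //
    //   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
    //   constraint.writeDelimitedTo(out);
    //   YarnProtos.SimplePlacementConstraintProto parsed =
    //       YarnProtos.SimplePlacementConstraintProto.parseDelimitedFrom(
    //           new java.io.ByteArrayInputStream(out.toByteArray()));
    // --------------------------------------------------------------------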
    private PlacementConstraintTargetProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private PlacementConstraintTargetProto() {
      targetType_ = 1;
      targetKey_ = "";
      targetValues_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new PlacementConstraintTargetProto();
    }

    @java.lang.Override
    public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintTargetProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintTargetProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder.class);
    }

    /**
     * Protobuf enum {@code hadoop.yarn.PlacementConstraintTargetProto.TargetType}
     */
    public enum TargetType
        implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
      /**
       * NODE_ATTRIBUTE = 1;
       */
      NODE_ATTRIBUTE(1),
      /**
       * ALLOCATION_TAG = 2;
       */
      ALLOCATION_TAG(2),
      /**
       * SELF = 3;
       */
      SELF(3),
      ;

      /**
       * NODE_ATTRIBUTE = 1;
       */
      public static final int NODE_ATTRIBUTE_VALUE = 1;
      /**
       * ALLOCATION_TAG = 2;
       */
      public static final int ALLOCATION_TAG_VALUE = 2;
      /**
       * SELF = 3;
       */
      public static final int SELF_VALUE = 3;


      public final int getNumber() {
        return value;
      }

      /**
       * @param value The numeric wire value of the corresponding enum entry.
       * @return The enum associated with the given numeric wire value.
       * @deprecated Use {@link #forNumber(int)} instead.
       */
      @java.lang.Deprecated
      public static TargetType valueOf(int value) {
        return forNumber(value);
      }

      /**
       * @param value The numeric wire value of the corresponding enum entry.
       * @return The enum associated with the given numeric wire value.
*/ public static TargetType forNumber(int value) { switch (value) { case 1: return NODE_ATTRIBUTE; case 2: return ALLOCATION_TAG; case 3: return SELF; default: return null; } } public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap< TargetType> internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap() { public TargetType findValueByNumber(int number) { return TargetType.forNumber(number); } }; public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.getDescriptor().getEnumTypes().get(0); } private static final TargetType[] VALUES = values(); public static TargetType valueOf( org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private TargetType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hadoop.yarn.PlacementConstraintTargetProto.TargetType) } private int bitField0_; public static final int TARGETTYPE_FIELD_NUMBER = 1; private int targetType_ = 1; /** * required .hadoop.yarn.PlacementConstraintTargetProto.TargetType targetType = 1; * @return Whether the targetType field is set. */ @java.lang.Override public boolean hasTargetType() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.PlacementConstraintTargetProto.TargetType targetType = 1; * @return The targetType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType getTargetType() { org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType result = org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType.forNumber(targetType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType.NODE_ATTRIBUTE : result; } public static final int TARGETKEY_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object targetKey_ = ""; /** * optional string targetKey = 2; * @return Whether the targetKey field is set. */ @java.lang.Override public boolean hasTargetKey() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string targetKey = 2; * @return The targetKey. */ @java.lang.Override public java.lang.String getTargetKey() { java.lang.Object ref = targetKey_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { targetKey_ = s; } return s; } } /** * optional string targetKey = 2; * @return The bytes for targetKey. 
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getTargetKeyBytes() { java.lang.Object ref = targetKey_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); targetKey_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int TARGETVALUES_FIELD_NUMBER = 3; @SuppressWarnings("serial") private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList targetValues_; /** * repeated string targetValues = 3; * @return A list containing the targetValues. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getTargetValuesList() { return targetValues_; } /** * repeated string targetValues = 3; * @return The count of targetValues. */ public int getTargetValuesCount() { return targetValues_.size(); } /** * repeated string targetValues = 3; * @param index The index of the element to return. * @return The targetValues at the given index. */ public java.lang.String getTargetValues(int index) { return targetValues_.get(index); } /** * repeated string targetValues = 3; * @param index The index of the value to return. * @return The bytes of the targetValues at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getTargetValuesBytes(int index) { return targetValues_.getByteString(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasTargetType()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeEnum(1, targetType_); } if (((bitField0_ & 0x00000002) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, targetKey_); } for (int i = 0; i < targetValues_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, targetValues_.getRaw(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(1, targetType_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, targetKey_); } { int dataSize = 0; for (int i = 0; i < targetValues_.size(); i++) { dataSize += computeStringSizeNoTag(targetValues_.getRaw(i)); } size += dataSize; size += 1 * getTargetValuesList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto) obj; if 
(hasTargetType() != other.hasTargetType()) return false; if (hasTargetType()) { if (targetType_ != other.targetType_) return false; } if (hasTargetKey() != other.hasTargetKey()) return false; if (hasTargetKey()) { if (!getTargetKey() .equals(other.getTargetKey())) return false; } if (!getTargetValuesList() .equals(other.getTargetValuesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasTargetType()) { hash = (37 * hash) + TARGETTYPE_FIELD_NUMBER; hash = (53 * hash) + targetType_; } if (hasTargetKey()) { hash = (37 * hash) + TARGETKEY_FIELD_NUMBER; hash = (53 * hash) + getTargetKey().hashCode(); } if (getTargetValuesCount() > 0) { hash = (37 * hash) + TARGETVALUES_FIELD_NUMBER; hash = (53 * hash) + getTargetValuesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.PlacementConstraintTargetProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.PlacementConstraintTargetProto) org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintTargetProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintTargetProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; targetType_ = 1; targetKey_ = ""; targetValues_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintTargetProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto build() { org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto result = new org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto result) { if (((bitField0_ & 0x00000004) != 0)) { targetValues_ = targetValues_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000004); } result.targetValues_ = targetValues_; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto result) { int 
from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.targetType_ = targetType_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.targetKey_ = targetKey_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.getDefaultInstance()) return this; if (other.hasTargetType()) { setTargetType(other.getTargetType()); } if (other.hasTargetKey()) { targetKey_ = other.targetKey_; bitField0_ |= 0x00000002; onChanged(); } if (!other.targetValues_.isEmpty()) { if (targetValues_.isEmpty()) { targetValues_ = other.targetValues_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureTargetValuesIsMutable(); targetValues_.addAll(other.targetValues_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasTargetType()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(1, tmpRaw); } else { targetType_ = tmpRaw; bitField0_ |= 0x00000001; } break; } // case 8 case 18: { targetKey_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); ensureTargetValuesIsMutable(); targetValues_.add(bs); break; 
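          // (Editor's note, not generated code.) The wire tags handled above follow
          // the protobuf encoding rule tag = (fieldNumber << 3) | wireType:
          // tag 8 = field 1 (targetType) as a varint enum, tag 18 = field 2
          // (targetKey) length-delimited, and tag 26 = field 3 (one targetValues
          // entry) length-delimited.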
} // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int targetType_ = 1; /** * required .hadoop.yarn.PlacementConstraintTargetProto.TargetType targetType = 1; * @return Whether the targetType field is set. */ @java.lang.Override public boolean hasTargetType() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.PlacementConstraintTargetProto.TargetType targetType = 1; * @return The targetType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType getTargetType() { org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType result = org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType.forNumber(targetType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType.NODE_ATTRIBUTE : result; } /** * required .hadoop.yarn.PlacementConstraintTargetProto.TargetType targetType = 1; * @param value The targetType to set. * @return This builder for chaining. */ public Builder setTargetType(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; targetType_ = value.getNumber(); onChanged(); return this; } /** * required .hadoop.yarn.PlacementConstraintTargetProto.TargetType targetType = 1; * @return This builder for chaining. */ public Builder clearTargetType() { bitField0_ = (bitField0_ & ~0x00000001); targetType_ = 1; onChanged(); return this; } private java.lang.Object targetKey_ = ""; /** * optional string targetKey = 2; * @return Whether the targetKey field is set. */ public boolean hasTargetKey() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string targetKey = 2; * @return The targetKey. */ public java.lang.String getTargetKey() { java.lang.Object ref = targetKey_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { targetKey_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string targetKey = 2; * @return The bytes for targetKey. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getTargetKeyBytes() { java.lang.Object ref = targetKey_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); targetKey_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string targetKey = 2; * @param value The targetKey to set. * @return This builder for chaining. */ public Builder setTargetKey( java.lang.String value) { if (value == null) { throw new NullPointerException(); } targetKey_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional string targetKey = 2; * @return This builder for chaining. 
*/ public Builder clearTargetKey() { targetKey_ = getDefaultInstance().getTargetKey(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * optional string targetKey = 2; * @param value The bytes for targetKey to set. * @return This builder for chaining. */ public Builder setTargetKeyBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } targetKey_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList targetValues_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; private void ensureTargetValuesIsMutable() { if (!((bitField0_ & 0x00000004) != 0)) { targetValues_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(targetValues_); bitField0_ |= 0x00000004; } } /** * repeated string targetValues = 3; * @return A list containing the targetValues. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getTargetValuesList() { return targetValues_.getUnmodifiableView(); } /** * repeated string targetValues = 3; * @return The count of targetValues. */ public int getTargetValuesCount() { return targetValues_.size(); } /** * repeated string targetValues = 3; * @param index The index of the element to return. * @return The targetValues at the given index. */ public java.lang.String getTargetValues(int index) { return targetValues_.get(index); } /** * repeated string targetValues = 3; * @param index The index of the value to return. * @return The bytes of the targetValues at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getTargetValuesBytes(int index) { return targetValues_.getByteString(index); } /** * repeated string targetValues = 3; * @param index The index to set the value at. * @param value The targetValues to set. * @return This builder for chaining. */ public Builder setTargetValues( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureTargetValuesIsMutable(); targetValues_.set(index, value); onChanged(); return this; } /** * repeated string targetValues = 3; * @param value The targetValues to add. * @return This builder for chaining. */ public Builder addTargetValues( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureTargetValuesIsMutable(); targetValues_.add(value); onChanged(); return this; } /** * repeated string targetValues = 3; * @param values The targetValues to add. * @return This builder for chaining. */ public Builder addAllTargetValues( java.lang.Iterable values) { ensureTargetValuesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, targetValues_); onChanged(); return this; } /** * repeated string targetValues = 3; * @return This builder for chaining. */ public Builder clearTargetValues() { targetValues_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * repeated string targetValues = 3; * @param value The bytes of the targetValues to add. * @return This builder for chaining. 
*/ public Builder addTargetValuesBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureTargetValuesIsMutable(); targetValues_.add(value); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.PlacementConstraintTargetProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.PlacementConstraintTargetProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public PlacementConstraintTargetProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface TimedPlacementConstraintProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.TimedPlacementConstraintProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1; * @return Whether the placementConstraint field is set. */ boolean hasPlacementConstraint(); /** * required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1; * @return The placementConstraint. 
*/ org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint(); /** * required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder(); /** * required int64 schedulingDelay = 2; * @return Whether the schedulingDelay field is set. */ boolean hasSchedulingDelay(); /** * required int64 schedulingDelay = 2; * @return The schedulingDelay. */ long getSchedulingDelay(); /** * optional .hadoop.yarn.TimedPlacementConstraintProto.DelayUnit delayUnit = 3 [default = MILLISECONDS]; * @return Whether the delayUnit field is set. */ boolean hasDelayUnit(); /** * optional .hadoop.yarn.TimedPlacementConstraintProto.DelayUnit delayUnit = 3 [default = MILLISECONDS]; * @return The delayUnit. */ org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit getDelayUnit(); } /** * Protobuf type {@code hadoop.yarn.TimedPlacementConstraintProto} */ public static final class TimedPlacementConstraintProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.TimedPlacementConstraintProto) TimedPlacementConstraintProtoOrBuilder { private static final long serialVersionUID = 0L; // Use TimedPlacementConstraintProto.newBuilder() to construct. private TimedPlacementConstraintProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private TimedPlacementConstraintProto() { delayUnit_ = 1; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new TimedPlacementConstraintProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_TimedPlacementConstraintProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_TimedPlacementConstraintProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.class, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder.class); } /** * Protobuf enum {@code hadoop.yarn.TimedPlacementConstraintProto.DelayUnit} */ public enum DelayUnit implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum { /** * MILLISECONDS = 1; */ MILLISECONDS(1), /** * OPPORTUNITIES = 2; */ OPPORTUNITIES(2), ; /** * MILLISECONDS = 1; */ public static final int MILLISECONDS_VALUE = 1; /** * OPPORTUNITIES = 2; */ public static final int OPPORTUNITIES_VALUE = 2; public final int getNumber() { return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static DelayUnit valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static DelayUnit forNumber(int value) { switch (value) { case 1: return MILLISECONDS; case 2: return OPPORTUNITIES; default: return null; } } public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap< DelayUnit> internalValueMap = new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap() { public DelayUnit findValueByNumber(int number) { return DelayUnit.forNumber(number); } }; public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.getDescriptor().getEnumTypes().get(0); } private static final DelayUnit[] VALUES = values(); public static DelayUnit valueOf( org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private DelayUnit(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hadoop.yarn.TimedPlacementConstraintProto.DelayUnit) } private int bitField0_; public static final int PLACEMENTCONSTRAINT_FIELD_NUMBER = 1; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto placementConstraint_; /** * required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1; * @return Whether the placementConstraint field is set. */ @java.lang.Override public boolean hasPlacementConstraint() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1; * @return The placementConstraint. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint() { return placementConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_; } /** * required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder() { return placementConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_; } public static final int SCHEDULINGDELAY_FIELD_NUMBER = 2; private long schedulingDelay_ = 0L; /** * required int64 schedulingDelay = 2; * @return Whether the schedulingDelay field is set. */ @java.lang.Override public boolean hasSchedulingDelay() { return ((bitField0_ & 0x00000002) != 0); } /** * required int64 schedulingDelay = 2; * @return The schedulingDelay. 
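* The value is interpreted in the unit given by delayUnit, which defaults to MILLISECONDS.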
*/ @java.lang.Override public long getSchedulingDelay() { return schedulingDelay_; } public static final int DELAYUNIT_FIELD_NUMBER = 3; private int delayUnit_ = 1; /** * optional .hadoop.yarn.TimedPlacementConstraintProto.DelayUnit delayUnit = 3 [default = MILLISECONDS]; * @return Whether the delayUnit field is set. */ @java.lang.Override public boolean hasDelayUnit() { return ((bitField0_ & 0x00000004) != 0); } /** * optional .hadoop.yarn.TimedPlacementConstraintProto.DelayUnit delayUnit = 3 [default = MILLISECONDS]; * @return The delayUnit. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit getDelayUnit() { org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit result = org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit.forNumber(delayUnit_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit.MILLISECONDS : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasPlacementConstraint()) { memoizedIsInitialized = 0; return false; } if (!hasSchedulingDelay()) { memoizedIsInitialized = 0; return false; } if (!getPlacementConstraint().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getPlacementConstraint()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt64(2, schedulingDelay_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeEnum(3, delayUnit_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getPlacementConstraint()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(2, schedulingDelay_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(3, delayUnit_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto) obj; if (hasPlacementConstraint() != other.hasPlacementConstraint()) return false; if (hasPlacementConstraint()) { if (!getPlacementConstraint() .equals(other.getPlacementConstraint())) return false; } if (hasSchedulingDelay() != other.hasSchedulingDelay()) return false; if (hasSchedulingDelay()) { if (getSchedulingDelay() != other.getSchedulingDelay()) return false; } if (hasDelayUnit() != other.hasDelayUnit()) return false; if (hasDelayUnit()) { if (delayUnit_ != other.delayUnit_) return false; } if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasPlacementConstraint()) { hash = (37 * hash) + PLACEMENTCONSTRAINT_FIELD_NUMBER; hash = (53 * hash) + getPlacementConstraint().hashCode(); } if (hasSchedulingDelay()) { hash = (37 * hash) + SCHEDULINGDELAY_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getSchedulingDelay()); } if (hasDelayUnit()) { hash = (37 * hash) + DELAYUNIT_FIELD_NUMBER; hash = (53 * hash) + delayUnit_; } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 
.parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.TimedPlacementConstraintProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.TimedPlacementConstraintProto) org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_TimedPlacementConstraintProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_TimedPlacementConstraintProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.class, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getPlacementConstraintFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; 
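// clear() restores every field default: the nested constraint (and any live
// sub-builder) is dropped, schedulingDelay returns to 0, and delayUnit to
// MILLISECONDS (raw value 1), as the statements below show.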
placementConstraint_ = null; if (placementConstraintBuilder_ != null) { placementConstraintBuilder_.dispose(); placementConstraintBuilder_ = null; } schedulingDelay_ = 0L; delayUnit_ = 1; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_TimedPlacementConstraintProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto build() { org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto result = new org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.placementConstraint_ = placementConstraintBuilder_ == null ? placementConstraint_ : placementConstraintBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.schedulingDelay_ = schedulingDelay_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.delayUnit_ = delayUnit_; to_bitField0_ |= 0x00000004; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto)other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.getDefaultInstance()) return this; if (other.hasPlacementConstraint()) { mergePlacementConstraint(other.getPlacementConstraint()); } if (other.hasSchedulingDelay()) { setSchedulingDelay(other.getSchedulingDelay()); } if (other.hasDelayUnit()) { setDelayUnit(other.getDelayUnit()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasPlacementConstraint()) { return false; } if (!hasSchedulingDelay()) { return false; } if (!getPlacementConstraint().isInitialized()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getPlacementConstraintFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 16: { schedulingDelay_ = input.readInt64(); bitField0_ |= 0x00000002; break; } // case 16 case 24: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(3, tmpRaw); } else { delayUnit_ = tmpRaw; bitField0_ |= 0x00000004; } break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto placementConstraint_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> placementConstraintBuilder_; /** * required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1; * @return Whether the placementConstraint field is set. */ public boolean hasPlacementConstraint() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1; * @return The placementConstraint. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint() { if (placementConstraintBuilder_ == null) { return placementConstraint_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_; } else { return placementConstraintBuilder_.getMessage(); } } /** * required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1; */ public Builder setPlacementConstraint(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) { if (placementConstraintBuilder_ == null) { if (value == null) { throw new NullPointerException(); } placementConstraint_ = value; } else { placementConstraintBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1; */ public Builder setPlacementConstraint( org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder builderForValue) { if (placementConstraintBuilder_ == null) { placementConstraint_ = builderForValue.build(); } else { placementConstraintBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1; */ public Builder mergePlacementConstraint(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) { if (placementConstraintBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && placementConstraint_ != null && placementConstraint_ != org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance()) { getPlacementConstraintBuilder().mergeFrom(value); } else { placementConstraint_ = value; } } else { placementConstraintBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1; */ public Builder clearPlacementConstraint() { bitField0_ = (bitField0_ & ~0x00000001); placementConstraint_ = null; if (placementConstraintBuilder_ != null) { placementConstraintBuilder_.dispose(); placementConstraintBuilder_ = null; } onChanged(); return this; } /** * required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder getPlacementConstraintBuilder() { bitField0_ |= 0x00000001; onChanged(); return getPlacementConstraintFieldBuilder().getBuilder(); } /** * required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder() { if (placementConstraintBuilder_ != null) { return placementConstraintBuilder_.getMessageOrBuilder(); } else { return placementConstraint_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_; } } /** * required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> getPlacementConstraintFieldBuilder() { if (placementConstraintBuilder_ == null) { placementConstraintBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder>( getPlacementConstraint(), getParentForChildren(), isClean()); placementConstraint_ = null; } return placementConstraintBuilder_; } private long schedulingDelay_ ; /** * required int64 schedulingDelay = 2; * @return Whether the schedulingDelay field is set. */ @java.lang.Override public boolean hasSchedulingDelay() { return ((bitField0_ & 0x00000002) != 0); } /** * required int64 schedulingDelay = 2; * @return The schedulingDelay. */ @java.lang.Override public long getSchedulingDelay() { return schedulingDelay_; } /** * required int64 schedulingDelay = 2; * @param value The schedulingDelay to set. * @return This builder for chaining. */ public Builder setSchedulingDelay(long value) { schedulingDelay_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * required int64 schedulingDelay = 2; * @return This builder for chaining. */ public Builder clearSchedulingDelay() { bitField0_ = (bitField0_ & ~0x00000002); schedulingDelay_ = 0L; onChanged(); return this; } private int delayUnit_ = 1; /** * optional .hadoop.yarn.TimedPlacementConstraintProto.DelayUnit delayUnit = 3 [default = MILLISECONDS]; * @return Whether the delayUnit field is set. */ @java.lang.Override public boolean hasDelayUnit() { return ((bitField0_ & 0x00000004) != 0); } /** * optional .hadoop.yarn.TimedPlacementConstraintProto.DelayUnit delayUnit = 3 [default = MILLISECONDS]; * @return The delayUnit. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit getDelayUnit() { org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit result = org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit.forNumber(delayUnit_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit.MILLISECONDS : result; } /** * optional .hadoop.yarn.TimedPlacementConstraintProto.DelayUnit delayUnit = 3 [default = MILLISECONDS]; * @param value The delayUnit to set. * @return This builder for chaining. */ public Builder setDelayUnit(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; delayUnit_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.TimedPlacementConstraintProto.DelayUnit delayUnit = 3 [default = MILLISECONDS]; * @return This builder for chaining. 
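* Clearing restores the declared default, MILLISECONDS (raw value 1).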
*/ public Builder clearDelayUnit() { bitField0_ = (bitField0_ & ~0x00000004); delayUnit_ = 1; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.TimedPlacementConstraintProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.TimedPlacementConstraintProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public TimedPlacementConstraintProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface CompositePlacementConstraintProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.CompositePlacementConstraintProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * required .hadoop.yarn.CompositePlacementConstraintProto.CompositeType compositeType = 1; * @return Whether the compositeType field is set. */ boolean hasCompositeType(); /** * required .hadoop.yarn.CompositePlacementConstraintProto.CompositeType compositeType = 1; * @return The compositeType. 
*/ org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType getCompositeType(); /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ java.util.List getChildConstraintsList(); /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getChildConstraints(int index); /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ int getChildConstraintsCount(); /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ java.util.List getChildConstraintsOrBuilderList(); /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getChildConstraintsOrBuilder( int index); /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ java.util.List getTimedChildConstraintsList(); /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto getTimedChildConstraints(int index); /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ int getTimedChildConstraintsCount(); /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ java.util.List getTimedChildConstraintsOrBuilderList(); /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProtoOrBuilder getTimedChildConstraintsOrBuilder( int index); } /** * Protobuf type {@code hadoop.yarn.CompositePlacementConstraintProto} */ public static final class CompositePlacementConstraintProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.CompositePlacementConstraintProto) CompositePlacementConstraintProtoOrBuilder { private static final long serialVersionUID = 0L; // Use CompositePlacementConstraintProto.newBuilder() to construct. 
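  // Illustrative sketch (not part of the generated source): composing a
  // DELAYED_OR constraint from one timed child, then round-tripping it on the
  // wire. All field values are made up. setPlacementConstraint,
  // setSchedulingDelay, setDelayUnit, setCompositeType and parseFrom(byte[])
  // are generated in this file; addTimedChildConstraints is assumed to follow
  // the same repeated-field pattern as addChildConstraints, and toByteArray()
  // is the standard protobuf Message method.
  //
  //   YarnProtos.TimedPlacementConstraintProto timed =
  //       YarnProtos.TimedPlacementConstraintProto.newBuilder()
  //           .setPlacementConstraint(childConstraint) // required field 1, built elsewhere
  //           .setSchedulingDelay(1000L)               // required field 2
  //           .setDelayUnit(YarnProtos.TimedPlacementConstraintProto.DelayUnit.MILLISECONDS)
  //           .build();                                // fails if a required field is unset
  //   YarnProtos.CompositePlacementConstraintProto composite =
  //       YarnProtos.CompositePlacementConstraintProto.newBuilder()
  //           .setCompositeType(YarnProtos.CompositePlacementConstraintProto.CompositeType.DELAYED_OR)
  //           .addTimedChildConstraints(timed)
  //           .build();
  //   byte[] wire = composite.toByteArray();
  //   YarnProtos.CompositePlacementConstraintProto parsed =
  //       YarnProtos.CompositePlacementConstraintProto.parseFrom(wire);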
  private CompositePlacementConstraintProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) {
    super(builder);
  }
  private CompositePlacementConstraintProto() {
    compositeType_ = 1;
    childConstraints_ = java.util.Collections.emptyList();
    timedChildConstraints_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new CompositePlacementConstraintProto();
  }

  @java.lang.Override
  public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CompositePlacementConstraintProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CompositePlacementConstraintProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.class, org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.Builder.class);
  }

  /**
   * Protobuf enum {@code hadoop.yarn.CompositePlacementConstraintProto.CompositeType}
   */
  public enum CompositeType
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * All children constraints have to be satisfied.
     *
     * AND = 1;
     */
    AND(1),
    /**
     * One of the children constraints has to be satisfied.
     *
     * OR = 2;
     */
    OR(2),
    /**
     * Attempt to satisfy the first child constraint for delays[0] units (e.g.,
     * millisec or heartbeats). If this fails, try to satisfy the second child
     * constraint for delays[1] units and so on.
     *
     * DELAYED_OR = 3;
     */
    DELAYED_OR(3),
    ;

    /**
     * All children constraints have to be satisfied.
     *
     * AND = 1;
     */
    public static final int AND_VALUE = 1;
    /**
     * One of the children constraints has to be satisfied.
     *
     * OR = 2;
     */
    public static final int OR_VALUE = 2;
    /**
     * Attempt to satisfy the first child constraint for delays[0] units (e.g.,
     * millisec or heartbeats). If this fails, try to satisfy the second child
     * constraint for delays[1] units and so on.
     *
     * DELAYED_OR = 3;
     */
    public static final int DELAYED_OR_VALUE = 3;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static CompositeType valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static CompositeType forNumber(int value) {
      switch (value) {
        case 1: return AND;
        case 2: return OR;
        case 3: return DELAYED_OR;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        CompositeType> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap() {
            public CompositeType findValueByNumber(int number) {
              return CompositeType.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.getDescriptor().getEnumTypes().get(0);
    }

    private static final CompositeType[] VALUES = values();

    public static CompositeType valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private CompositeType(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.CompositePlacementConstraintProto.CompositeType)
  }

  private int bitField0_;
  public static final int COMPOSITETYPE_FIELD_NUMBER = 1;
  private int compositeType_ = 1;
  /**
   * required .hadoop.yarn.CompositePlacementConstraintProto.CompositeType compositeType = 1;
   * @return Whether the compositeType field is set.
   */
  @java.lang.Override public boolean hasCompositeType() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * required .hadoop.yarn.CompositePlacementConstraintProto.CompositeType compositeType = 1;
   * @return The compositeType.
   */
  @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType getCompositeType() {
    org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType result = org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType.forNumber(compositeType_);
    return result == null ?
org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType.AND : result; } public static final int CHILDCONSTRAINTS_FIELD_NUMBER = 2; @SuppressWarnings("serial") private java.util.List childConstraints_; /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ @java.lang.Override public java.util.List getChildConstraintsList() { return childConstraints_; } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ @java.lang.Override public java.util.List getChildConstraintsOrBuilderList() { return childConstraints_; } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ @java.lang.Override public int getChildConstraintsCount() { return childConstraints_.size(); } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getChildConstraints(int index) { return childConstraints_.get(index); } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getChildConstraintsOrBuilder( int index) { return childConstraints_.get(index); } public static final int TIMEDCHILDCONSTRAINTS_FIELD_NUMBER = 3; @SuppressWarnings("serial") private java.util.List timedChildConstraints_; /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ @java.lang.Override public java.util.List getTimedChildConstraintsList() { return timedChildConstraints_; } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ @java.lang.Override public java.util.List getTimedChildConstraintsOrBuilderList() { return timedChildConstraints_; } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ @java.lang.Override public int getTimedChildConstraintsCount() { return timedChildConstraints_.size(); } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto getTimedChildConstraints(int index) { return timedChildConstraints_.get(index); } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProtoOrBuilder getTimedChildConstraintsOrBuilder( int index) { return timedChildConstraints_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasCompositeType()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getChildConstraintsCount(); i++) { if (!getChildConstraints(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getTimedChildConstraintsCount(); i++) { if (!getTimedChildConstraints(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeEnum(1, compositeType_); } for (int i = 0; i < 
childConstraints_.size(); i++) { output.writeMessage(2, childConstraints_.get(i)); } for (int i = 0; i < timedChildConstraints_.size(); i++) { output.writeMessage(3, timedChildConstraints_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(1, compositeType_); } for (int i = 0; i < childConstraints_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, childConstraints_.get(i)); } for (int i = 0; i < timedChildConstraints_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(3, timedChildConstraints_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto) obj; if (hasCompositeType() != other.hasCompositeType()) return false; if (hasCompositeType()) { if (compositeType_ != other.compositeType_) return false; } if (!getChildConstraintsList() .equals(other.getChildConstraintsList())) return false; if (!getTimedChildConstraintsList() .equals(other.getTimedChildConstraintsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasCompositeType()) { hash = (37 * hash) + COMPOSITETYPE_FIELD_NUMBER; hash = (53 * hash) + compositeType_; } if (getChildConstraintsCount() > 0) { hash = (37 * hash) + CHILDCONSTRAINTS_FIELD_NUMBER; hash = (53 * hash) + getChildConstraintsList().hashCode(); } if (getTimedChildConstraintsCount() > 0) { hash = (37 * hash) + TIMEDCHILDCONSTRAINTS_FIELD_NUMBER; hash = (53 * hash) + getTimedChildConstraintsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, 
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.CompositePlacementConstraintProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.CompositePlacementConstraintProto) org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CompositePlacementConstraintProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CompositePlacementConstraintProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.class, org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; compositeType_ = 1; if (childConstraintsBuilder_ == null) { childConstraints_ = java.util.Collections.emptyList(); } else { childConstraints_ = null; childConstraintsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (timedChildConstraintsBuilder_ == null) { timedChildConstraints_ = java.util.Collections.emptyList(); } else { timedChildConstraints_ = null; timedChildConstraintsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CompositePlacementConstraintProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto build() { org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto result = new org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void 
buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto result) { if (childConstraintsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { childConstraints_ = java.util.Collections.unmodifiableList(childConstraints_); bitField0_ = (bitField0_ & ~0x00000002); } result.childConstraints_ = childConstraints_; } else { result.childConstraints_ = childConstraintsBuilder_.build(); } if (timedChildConstraintsBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0)) { timedChildConstraints_ = java.util.Collections.unmodifiableList(timedChildConstraints_); bitField0_ = (bitField0_ & ~0x00000004); } result.timedChildConstraints_ = timedChildConstraints_; } else { result.timedChildConstraints_ = timedChildConstraintsBuilder_.build(); } } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.compositeType_ = compositeType_; to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.getDefaultInstance()) return this; if (other.hasCompositeType()) { setCompositeType(other.getCompositeType()); } if (childConstraintsBuilder_ == null) { if (!other.childConstraints_.isEmpty()) { if (childConstraints_.isEmpty()) { childConstraints_ = other.childConstraints_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureChildConstraintsIsMutable(); childConstraints_.addAll(other.childConstraints_); } onChanged(); } } else { if (!other.childConstraints_.isEmpty()) { if (childConstraintsBuilder_.isEmpty()) { childConstraintsBuilder_.dispose(); childConstraintsBuilder_ = null; childConstraints_ = other.childConstraints_; bitField0_ = (bitField0_ & ~0x00000002); childConstraintsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getChildConstraintsFieldBuilder() : null; } else { childConstraintsBuilder_.addAllMessages(other.childConstraints_); } } } if (timedChildConstraintsBuilder_ == null) { if (!other.timedChildConstraints_.isEmpty()) { if (timedChildConstraints_.isEmpty()) { timedChildConstraints_ = other.timedChildConstraints_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureTimedChildConstraintsIsMutable(); timedChildConstraints_.addAll(other.timedChildConstraints_); } onChanged(); } } else { if (!other.timedChildConstraints_.isEmpty()) { if (timedChildConstraintsBuilder_.isEmpty()) { timedChildConstraintsBuilder_.dispose(); timedChildConstraintsBuilder_ = null; timedChildConstraints_ = other.timedChildConstraints_; bitField0_ = (bitField0_ & ~0x00000004); timedChildConstraintsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getTimedChildConstraintsFieldBuilder() : null; } else { timedChildConstraintsBuilder_.addAllMessages(other.timedChildConstraints_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasCompositeType()) { return false; } for (int i = 0; i < getChildConstraintsCount(); i++) { if (!getChildConstraints(i).isInitialized()) { return false; } } for (int i = 0; i < getTimedChildConstraintsCount(); i++) { if (!getTimedChildConstraints(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(1, tmpRaw); } else { compositeType_ = tmpRaw; bitField0_ |= 0x00000001; } break; } // case 8 case 18: { org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.PARSER, extensionRegistry); if (childConstraintsBuilder_ == null) { ensureChildConstraintsIsMutable(); childConstraints_.add(m); } else { childConstraintsBuilder_.addMessage(m); } break; } // case 18 case 26: { org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.PARSER, extensionRegistry); if (timedChildConstraintsBuilder_ == null) { ensureTimedChildConstraintsIsMutable(); timedChildConstraints_.add(m); } else { timedChildConstraintsBuilder_.addMessage(m); } break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int compositeType_ = 1; /** * required .hadoop.yarn.CompositePlacementConstraintProto.CompositeType compositeType = 1; * @return Whether the 
compositeType field is set. */ @java.lang.Override public boolean hasCompositeType() { return ((bitField0_ & 0x00000001) != 0); } /** * required .hadoop.yarn.CompositePlacementConstraintProto.CompositeType compositeType = 1; * @return The compositeType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType getCompositeType() { org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType result = org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType.forNumber(compositeType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType.AND : result; } /** * required .hadoop.yarn.CompositePlacementConstraintProto.CompositeType compositeType = 1; * @param value The compositeType to set. * @return This builder for chaining. */ public Builder setCompositeType(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; compositeType_ = value.getNumber(); onChanged(); return this; } /** * required .hadoop.yarn.CompositePlacementConstraintProto.CompositeType compositeType = 1; * @return This builder for chaining. */ public Builder clearCompositeType() { bitField0_ = (bitField0_ & ~0x00000001); compositeType_ = 1; onChanged(); return this; } private java.util.List childConstraints_ = java.util.Collections.emptyList(); private void ensureChildConstraintsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { childConstraints_ = new java.util.ArrayList(childConstraints_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> childConstraintsBuilder_; /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public java.util.List getChildConstraintsList() { if (childConstraintsBuilder_ == null) { return java.util.Collections.unmodifiableList(childConstraints_); } else { return childConstraintsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public int getChildConstraintsCount() { if (childConstraintsBuilder_ == null) { return childConstraints_.size(); } else { return childConstraintsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getChildConstraints(int index) { if (childConstraintsBuilder_ == null) { return childConstraints_.get(index); } else { return childConstraintsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public Builder setChildConstraints( int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) { if (childConstraintsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureChildConstraintsIsMutable(); childConstraints_.set(index, value); onChanged(); } else { childConstraintsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public Builder 
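  /*
   * Illustrative sketch: compositeType is a required proto2 field, so the
   * builder's isInitialized() (defined above) is false until it is set, and
   * build() would throw for an uninitialized message while buildPartial()
   * would not. Hypothetical usage:
   *
   *   YarnProtos.CompositePlacementConstraintProto.Builder b =
   *       YarnProtos.CompositePlacementConstraintProto.newBuilder();
   *   boolean ready = b.isInitialized();   // false, compositeType unset
   *   b.setCompositeType(
   *       YarnProtos.CompositePlacementConstraintProto.CompositeType.AND);
   *   YarnProtos.CompositePlacementConstraintProto msg = b.build();  // ok
   */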
setChildConstraints( int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder builderForValue) { if (childConstraintsBuilder_ == null) { ensureChildConstraintsIsMutable(); childConstraints_.set(index, builderForValue.build()); onChanged(); } else { childConstraintsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public Builder addChildConstraints(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) { if (childConstraintsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureChildConstraintsIsMutable(); childConstraints_.add(value); onChanged(); } else { childConstraintsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public Builder addChildConstraints( int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) { if (childConstraintsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureChildConstraintsIsMutable(); childConstraints_.add(index, value); onChanged(); } else { childConstraintsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public Builder addChildConstraints( org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder builderForValue) { if (childConstraintsBuilder_ == null) { ensureChildConstraintsIsMutable(); childConstraints_.add(builderForValue.build()); onChanged(); } else { childConstraintsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public Builder addChildConstraints( int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder builderForValue) { if (childConstraintsBuilder_ == null) { ensureChildConstraintsIsMutable(); childConstraints_.add(index, builderForValue.build()); onChanged(); } else { childConstraintsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public Builder addAllChildConstraints( java.lang.Iterable values) { if (childConstraintsBuilder_ == null) { ensureChildConstraintsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, childConstraints_); onChanged(); } else { childConstraintsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public Builder clearChildConstraints() { if (childConstraintsBuilder_ == null) { childConstraints_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { childConstraintsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public Builder removeChildConstraints(int index) { if (childConstraintsBuilder_ == null) { ensureChildConstraintsIsMutable(); childConstraints_.remove(index); onChanged(); } else { childConstraintsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder getChildConstraintsBuilder( int index) { return getChildConstraintsFieldBuilder().getBuilder(index); } /** * repeated 
.hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getChildConstraintsOrBuilder( int index) { if (childConstraintsBuilder_ == null) { return childConstraints_.get(index); } else { return childConstraintsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public java.util.List getChildConstraintsOrBuilderList() { if (childConstraintsBuilder_ != null) { return childConstraintsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(childConstraints_); } } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder addChildConstraintsBuilder() { return getChildConstraintsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder addChildConstraintsBuilder( int index) { return getChildConstraintsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2; */ public java.util.List getChildConstraintsBuilderList() { return getChildConstraintsFieldBuilder().getBuilderList(); } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> getChildConstraintsFieldBuilder() { if (childConstraintsBuilder_ == null) { childConstraintsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder>( childConstraints_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); childConstraints_ = null; } return childConstraintsBuilder_; } private java.util.List timedChildConstraints_ = java.util.Collections.emptyList(); private void ensureTimedChildConstraintsIsMutable() { if (!((bitField0_ & 0x00000004) != 0)) { timedChildConstraints_ = new java.util.ArrayList(timedChildConstraints_); bitField0_ |= 0x00000004; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProtoOrBuilder> timedChildConstraintsBuilder_; /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ public java.util.List getTimedChildConstraintsList() { if (timedChildConstraintsBuilder_ == null) { return java.util.Collections.unmodifiableList(timedChildConstraints_); } else { return timedChildConstraintsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ public int getTimedChildConstraintsCount() { if 
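  /*
   * Illustrative sketch: the repeated childConstraints field accepts prebuilt
   * messages or nested builders that stay attached to this parent builder via
   * the RepeatedFieldBuilderV3 above. prebuiltChild is a hypothetical
   * PlacementConstraintProto.
   *
   *   YarnProtos.CompositePlacementConstraintProto.Builder parent =
   *       YarnProtos.CompositePlacementConstraintProto.newBuilder();
   *   parent.addChildConstraints(prebuiltChild);     // finished message
   *   parent.addChildConstraintsBuilder()            // editable in place
   *         .mergeFrom(prebuiltChild);
   *   int n = parent.getChildConstraintsCount();     // 2
   */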
(timedChildConstraintsBuilder_ == null) { return timedChildConstraints_.size(); } else { return timedChildConstraintsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto getTimedChildConstraints(int index) { if (timedChildConstraintsBuilder_ == null) { return timedChildConstraints_.get(index); } else { return timedChildConstraintsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ public Builder setTimedChildConstraints( int index, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto value) { if (timedChildConstraintsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTimedChildConstraintsIsMutable(); timedChildConstraints_.set(index, value); onChanged(); } else { timedChildConstraintsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ public Builder setTimedChildConstraints( int index, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder builderForValue) { if (timedChildConstraintsBuilder_ == null) { ensureTimedChildConstraintsIsMutable(); timedChildConstraints_.set(index, builderForValue.build()); onChanged(); } else { timedChildConstraintsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ public Builder addTimedChildConstraints(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto value) { if (timedChildConstraintsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTimedChildConstraintsIsMutable(); timedChildConstraints_.add(value); onChanged(); } else { timedChildConstraintsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ public Builder addTimedChildConstraints( int index, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto value) { if (timedChildConstraintsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTimedChildConstraintsIsMutable(); timedChildConstraints_.add(index, value); onChanged(); } else { timedChildConstraintsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ public Builder addTimedChildConstraints( org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder builderForValue) { if (timedChildConstraintsBuilder_ == null) { ensureTimedChildConstraintsIsMutable(); timedChildConstraints_.add(builderForValue.build()); onChanged(); } else { timedChildConstraintsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ public Builder addTimedChildConstraints( int index, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder builderForValue) { if (timedChildConstraintsBuilder_ == null) { ensureTimedChildConstraintsIsMutable(); timedChildConstraints_.add(index, builderForValue.build()); onChanged(); } else { timedChildConstraintsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto 
timedChildConstraints = 3; */ public Builder addAllTimedChildConstraints( java.lang.Iterable values) { if (timedChildConstraintsBuilder_ == null) { ensureTimedChildConstraintsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, timedChildConstraints_); onChanged(); } else { timedChildConstraintsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ public Builder clearTimedChildConstraints() { if (timedChildConstraintsBuilder_ == null) { timedChildConstraints_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { timedChildConstraintsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ public Builder removeTimedChildConstraints(int index) { if (timedChildConstraintsBuilder_ == null) { ensureTimedChildConstraintsIsMutable(); timedChildConstraints_.remove(index); onChanged(); } else { timedChildConstraintsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder getTimedChildConstraintsBuilder( int index) { return getTimedChildConstraintsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProtoOrBuilder getTimedChildConstraintsOrBuilder( int index) { if (timedChildConstraintsBuilder_ == null) { return timedChildConstraints_.get(index); } else { return timedChildConstraintsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ public java.util.List getTimedChildConstraintsOrBuilderList() { if (timedChildConstraintsBuilder_ != null) { return timedChildConstraintsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(timedChildConstraints_); } } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder addTimedChildConstraintsBuilder() { return getTimedChildConstraintsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder addTimedChildConstraintsBuilder( int index) { return getTimedChildConstraintsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3; */ public java.util.List getTimedChildConstraintsBuilderList() { return getTimedChildConstraintsFieldBuilder().getBuilderList(); } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProtoOrBuilder> getTimedChildConstraintsFieldBuilder() { if 
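  /*
   * Illustrative sketch: timedChildConstraints mirrors the childConstraints
   * accessors; bulk insertion goes through addAllTimedChildConstraints and
   * clearTimedChildConstraints resets the list. timed1 and timed2 are
   * hypothetical TimedPlacementConstraintProto instances.
   *
   *   YarnProtos.CompositePlacementConstraintProto.Builder b =
   *       YarnProtos.CompositePlacementConstraintProto.newBuilder()
   *           .setCompositeType(
   *               YarnProtos.CompositePlacementConstraintProto.CompositeType.AND)
   *           .addAllTimedChildConstraints(java.util.Arrays.asList(timed1, timed2));
   *   b.clearTimedChildConstraints();   // back to an empty list
   */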
(timedChildConstraintsBuilder_ == null) { timedChildConstraintsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProtoOrBuilder>( timedChildConstraints_, ((bitField0_ & 0x00000004) != 0), getParentForChildren(), isClean()); timedChildConstraints_ = null; } return timedChildConstraintsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.CompositePlacementConstraintProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.CompositePlacementConstraintProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public CompositePlacementConstraintProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface PlacementConstraintMapEntryProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.PlacementConstraintMapEntryProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated string allocation_tags = 1; * @return A list containing the allocationTags. */ java.util.List getAllocationTagsList(); /** * repeated string allocation_tags = 1; * @return The count of allocationTags. 
*/ int getAllocationTagsCount(); /** * repeated string allocation_tags = 1; * @param index The index of the element to return. * @return The allocationTags at the given index. */ java.lang.String getAllocationTags(int index); /** * repeated string allocation_tags = 1; * @param index The index of the value to return. * @return The bytes of the allocationTags at the given index. */ org.apache.hadoop.thirdparty.protobuf.ByteString getAllocationTagsBytes(int index); /** * optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2; * @return Whether the placementConstraint field is set. */ boolean hasPlacementConstraint(); /** * optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2; * @return The placementConstraint. */ org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint(); /** * optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder(); } /** *
   * <pre>
   * This associates a set of allocation tags to a Placement Constraint.
   * </pre>
   *
* * Protobuf type {@code hadoop.yarn.PlacementConstraintMapEntryProto} */ public static final class PlacementConstraintMapEntryProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.PlacementConstraintMapEntryProto) PlacementConstraintMapEntryProtoOrBuilder { private static final long serialVersionUID = 0L; // Use PlacementConstraintMapEntryProto.newBuilder() to construct. private PlacementConstraintMapEntryProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private PlacementConstraintMapEntryProto() { allocationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new PlacementConstraintMapEntryProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder.class); } private int bitField0_; public static final int ALLOCATION_TAGS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList allocationTags_; /** * repeated string allocation_tags = 1; * @return A list containing the allocationTags. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getAllocationTagsList() { return allocationTags_; } /** * repeated string allocation_tags = 1; * @return The count of allocationTags. */ public int getAllocationTagsCount() { return allocationTags_.size(); } /** * repeated string allocation_tags = 1; * @param index The index of the element to return. * @return The allocationTags at the given index. */ public java.lang.String getAllocationTags(int index) { return allocationTags_.get(index); } /** * repeated string allocation_tags = 1; * @param index The index of the value to return. * @return The bytes of the allocationTags at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getAllocationTagsBytes(int index) { return allocationTags_.getByteString(index); } public static final int PLACEMENT_CONSTRAINT_FIELD_NUMBER = 2; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto placementConstraint_; /** * optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2; * @return Whether the placementConstraint field is set. */ @java.lang.Override public boolean hasPlacementConstraint() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2; * @return The placementConstraint. 
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint() { return placementConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_; } /** * optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder() { return placementConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasPlacementConstraint()) { if (!getPlacementConstraint().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < allocationTags_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, allocationTags_.getRaw(i)); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getPlacementConstraint()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < allocationTags_.size(); i++) { dataSize += computeStringSizeNoTag(allocationTags_.getRaw(i)); } size += dataSize; size += 1 * getAllocationTagsList().size(); } if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getPlacementConstraint()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto) obj; if (!getAllocationTagsList() .equals(other.getAllocationTagsList())) return false; if (hasPlacementConstraint() != other.hasPlacementConstraint()) return false; if (hasPlacementConstraint()) { if (!getPlacementConstraint() .equals(other.getPlacementConstraint())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getAllocationTagsCount() > 0) { hash = (37 * hash) + ALLOCATION_TAGS_FIELD_NUMBER; hash = (53 * hash) + getAllocationTagsList().hashCode(); } if (hasPlacementConstraint()) { hash = (37 * hash) + PLACEMENT_CONSTRAINT_FIELD_NUMBER; hash = (53 * hash) + getPlacementConstraint().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom( 
java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto 
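  /*
   * Illustrative sketch: the parseFrom overloads above give a simple
   * serialize/parse round trip; toByteArray() is inherited from the protobuf
   * runtime. entry is a hypothetical PlacementConstraintMapEntryProto.
   *
   *   byte[] wire = entry.toByteArray();
   *   YarnProtos.PlacementConstraintMapEntryProto copy =
   *       YarnProtos.PlacementConstraintMapEntryProto.parseFrom(wire);
   *   boolean same = copy.equals(entry);   // true, value equality per equals()
   */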
parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** *
     * <pre>
     * This associates a set of allocation tags to a Placement Constraint.
     * </pre>
     *
* * Protobuf type {@code hadoop.yarn.PlacementConstraintMapEntryProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.PlacementConstraintMapEntryProto) org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getPlacementConstraintFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; allocationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); placementConstraint_ = null; if (placementConstraintBuilder_ != null) { placementConstraintBuilder_.dispose(); placementConstraintBuilder_ = null; } return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto build() { org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto result = new org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto result) { if (((bitField0_ & 0x00000001) != 0)) { allocationTags_ = allocationTags_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000001); } 
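  /*
   * Illustrative sketch: as the class comment says, an entry associates a set
   * of allocation tags with one PlacementConstraintProto. constraint is a
   * hypothetical prebuilt PlacementConstraintProto.
   *
   *   YarnProtos.PlacementConstraintMapEntryProto entry =
   *       YarnProtos.PlacementConstraintMapEntryProto.newBuilder()
   *           .addAllocationTags("hbase")
   *           .addAllocationTags("regionserver")
   *           .setPlacementConstraint(constraint)
   *           .build();
   */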
result.allocationTags_ = allocationTags_; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.placementConstraint_ = placementConstraintBuilder_ == null ? placementConstraint_ : placementConstraintBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.getDefaultInstance()) return this; if (!other.allocationTags_.isEmpty()) { if (allocationTags_.isEmpty()) { allocationTags_ = other.allocationTags_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureAllocationTagsIsMutable(); allocationTags_.addAll(other.allocationTags_); } onChanged(); } if (other.hasPlacementConstraint()) { mergePlacementConstraint(other.getPlacementConstraint()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasPlacementConstraint()) { if (!getPlacementConstraint().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes(); ensureAllocationTagsIsMutable(); allocationTags_.add(bs); break; } // case 10 case 18: { input.readMessage( getPlacementConstraintFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; 
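  /*
   * Illustrative sketch: allocation_tags is a repeated string field; the
   * accessors below expose it as a list, and getAllocationTagsList() returns
   * an unmodifiable view, so mutation goes through the builder methods.
   *
   *   YarnProtos.PlacementConstraintMapEntryProto.Builder b =
   *       YarnProtos.PlacementConstraintMapEntryProto.newBuilder()
   *           .addAllocationTags("spark")
   *           .addAllAllocationTags(java.util.Arrays.asList("executor", "driver"));
   *   int count = b.getAllocationTagsCount();   // 3
   *   b.setAllocationTags(0, "flink");          // replace by index
   */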
} // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList allocationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; private void ensureAllocationTagsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { allocationTags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(allocationTags_); bitField0_ |= 0x00000001; } } /** * repeated string allocation_tags = 1; * @return A list containing the allocationTags. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getAllocationTagsList() { return allocationTags_.getUnmodifiableView(); } /** * repeated string allocation_tags = 1; * @return The count of allocationTags. */ public int getAllocationTagsCount() { return allocationTags_.size(); } /** * repeated string allocation_tags = 1; * @param index The index of the element to return. * @return The allocationTags at the given index. */ public java.lang.String getAllocationTags(int index) { return allocationTags_.get(index); } /** * repeated string allocation_tags = 1; * @param index The index of the value to return. * @return The bytes of the allocationTags at the given index. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getAllocationTagsBytes(int index) { return allocationTags_.getByteString(index); } /** * repeated string allocation_tags = 1; * @param index The index to set the value at. * @param value The allocationTags to set. * @return This builder for chaining. */ public Builder setAllocationTags( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureAllocationTagsIsMutable(); allocationTags_.set(index, value); onChanged(); return this; } /** * repeated string allocation_tags = 1; * @param value The allocationTags to add. * @return This builder for chaining. */ public Builder addAllocationTags( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureAllocationTagsIsMutable(); allocationTags_.add(value); onChanged(); return this; } /** * repeated string allocation_tags = 1; * @param values The allocationTags to add. * @return This builder for chaining. */ public Builder addAllAllocationTags( java.lang.Iterable values) { ensureAllocationTagsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, allocationTags_); onChanged(); return this; } /** * repeated string allocation_tags = 1; * @return This builder for chaining. */ public Builder clearAllocationTags() { allocationTags_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * repeated string allocation_tags = 1; * @param value The bytes of the allocationTags to add. * @return This builder for chaining. 
*/ public Builder addAllocationTagsBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureAllocationTagsIsMutable(); allocationTags_.add(value); onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto placementConstraint_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> placementConstraintBuilder_; /** * optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2; * @return Whether the placementConstraint field is set. */ public boolean hasPlacementConstraint() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2; * @return The placementConstraint. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint() { if (placementConstraintBuilder_ == null) { return placementConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_; } else { return placementConstraintBuilder_.getMessage(); } } /** * optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2; */ public Builder setPlacementConstraint(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) { if (placementConstraintBuilder_ == null) { if (value == null) { throw new NullPointerException(); } placementConstraint_ = value; } else { placementConstraintBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2; */ public Builder setPlacementConstraint( org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder builderForValue) { if (placementConstraintBuilder_ == null) { placementConstraint_ = builderForValue.build(); } else { placementConstraintBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2; */ public Builder mergePlacementConstraint(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) { if (placementConstraintBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && placementConstraint_ != null && placementConstraint_ != org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance()) { getPlacementConstraintBuilder().mergeFrom(value); } else { placementConstraint_ = value; } } else { placementConstraintBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2; */ public Builder clearPlacementConstraint() { bitField0_ = (bitField0_ & ~0x00000002); placementConstraint_ = null; if (placementConstraintBuilder_ != null) { placementConstraintBuilder_.dispose(); placementConstraintBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder getPlacementConstraintBuilder() { bitField0_ |= 
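  /*
   * Illustrative sketch: for the singular placement_constraint message field,
   * setPlacementConstraint replaces the current value while
   * mergePlacementConstraint (above) field-merges into whatever is already
   * set. c1 and c2 are hypothetical PlacementConstraintProto values.
   *
   *   YarnProtos.PlacementConstraintMapEntryProto.Builder b =
   *       YarnProtos.PlacementConstraintMapEntryProto.newBuilder();
   *   b.setPlacementConstraint(c1);     // replace outright
   *   b.mergePlacementConstraint(c2);   // merge c2 into c1
   *   b.clearPlacementConstraint();     // unset; hasPlacementConstraint() == false
   */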
0x00000002; onChanged(); return getPlacementConstraintFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder() { if (placementConstraintBuilder_ != null) { return placementConstraintBuilder_.getMessageOrBuilder(); } else { return placementConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_; } } /** * optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> getPlacementConstraintFieldBuilder() { if (placementConstraintBuilder_ == null) { placementConstraintBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder>( getPlacementConstraint(), getParentForChildren(), isClean()); placementConstraint_ = null; } return placementConstraintBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.PlacementConstraintMapEntryProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.PlacementConstraintMapEntryProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public PlacementConstraintMapEntryProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) 
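  /*
   * Illustrative sketch: parseDelimitedFrom (declared above) pairs with the
   * runtime's writeDelimitedTo to stream several length-prefixed messages over
   * one stream; it returns null at a clean end of stream. out and in are
   * hypothetical OutputStream/InputStream instances.
   *
   *   entry1.writeDelimitedTo(out);
   *   entry2.writeDelimitedTo(out);
   *   YarnProtos.PlacementConstraintMapEntryProto first =
   *       YarnProtos.PlacementConstraintMapEntryProto.parseDelimitedFrom(in);
   */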
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ReservationIdProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationIdProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional int64 id = 1; * @return Whether the id field is set. */ boolean hasId(); /** * optional int64 id = 1; * @return The id. */ long getId(); /** * optional int64 cluster_timestamp = 2; * @return Whether the clusterTimestamp field is set. */ boolean hasClusterTimestamp(); /** * optional int64 cluster_timestamp = 2; * @return The clusterTimestamp. */ long getClusterTimestamp(); } /** * Protobuf type {@code hadoop.yarn.ReservationIdProto} */ public static final class ReservationIdProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationIdProto) ReservationIdProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ReservationIdProto.newBuilder() to construct. private ReservationIdProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ReservationIdProto() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ReservationIdProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationIdProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationIdProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder.class); } private int bitField0_; public static final int ID_FIELD_NUMBER = 1; private long id_ = 0L; /** * optional int64 id = 1; * @return Whether the id field is set. */ @java.lang.Override public boolean hasId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional int64 id = 1; * @return The id. */ @java.lang.Override public long getId() { return id_; } public static final int CLUSTER_TIMESTAMP_FIELD_NUMBER = 2; private long clusterTimestamp_ = 0L; /** * optional int64 cluster_timestamp = 2; * @return Whether the clusterTimestamp field is set. */ @java.lang.Override public boolean hasClusterTimestamp() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int64 cluster_timestamp = 2; * @return The clusterTimestamp. 
*/ @java.lang.Override public long getClusterTimestamp() { return clusterTimestamp_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeInt64(1, id_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt64(2, clusterTimestamp_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(1, id_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(2, clusterTimestamp_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto) obj; if (hasId() != other.hasId()) return false; if (hasId()) { if (getId() != other.getId()) return false; } if (hasClusterTimestamp() != other.hasClusterTimestamp()) return false; if (hasClusterTimestamp()) { if (getClusterTimestamp() != other.getClusterTimestamp()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasId()) { hash = (37 * hash) + ID_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getId()); } if (hasClusterTimestamp()) { hash = (37 * hash) + CLUSTER_TIMESTAMP_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getClusterTimestamp()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, 
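  /*
   * Illustrative sketch: ReservationIdProto pairs an id with the cluster
   * timestamp, and the equals()/hashCode() above implement value equality, so
   * two independently built instances with the same fields compare equal. The
   * setters assumed here are the standard generated ones
   * (setId/setClusterTimestamp).
   *
   *   YarnProtos.ReservationIdProto a = YarnProtos.ReservationIdProto.newBuilder()
   *       .setId(42L).setClusterTimestamp(1700000000000L).build();
   *   YarnProtos.ReservationIdProto b = YarnProtos.ReservationIdProto.newBuilder()
   *       .setId(42L).setClusterTimestamp(1700000000000L).build();
   *   boolean eq = a.equals(b) && a.hashCode() == b.hashCode();   // true
   */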
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ReservationIdProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationIdProto) org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationIdProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationIdProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; id_ = 0L; clusterTimestamp_ = 0L; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationIdProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.id_ = id_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.clusterTimestamp_ = clusterTimestamp_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return 
super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance()) return this; if (other.hasId()) { setId(other.getId()); } if (other.hasClusterTimestamp()) { setClusterTimestamp(other.getClusterTimestamp()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { id_ = input.readInt64(); bitField0_ |= 0x00000001; break; } // case 8 case 16: { clusterTimestamp_ = input.readInt64(); bitField0_ |= 0x00000002; break; } // case 16 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private long id_ ; /** * optional int64 id = 1; * @return Whether the id field is set. */ @java.lang.Override public boolean hasId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional int64 id = 1; * @return The id. */ @java.lang.Override public long getId() { return id_; } /** * optional int64 id = 1; * @param value The id to set. * @return This builder for chaining. */ public Builder setId(long value) { id_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional int64 id = 1; * @return This builder for chaining. */ public Builder clearId() { bitField0_ = (bitField0_ & ~0x00000001); id_ = 0L; onChanged(); return this; } private long clusterTimestamp_ ; /** * optional int64 cluster_timestamp = 2; * @return Whether the clusterTimestamp field is set. */ @java.lang.Override public boolean hasClusterTimestamp() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int64 cluster_timestamp = 2; * @return The clusterTimestamp. 
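* <p>Editor's note: a brief sketch (not generated code) of the set/clear
* round trip on this builder. Presence is tracked in {@code bitField0_},
* and clearing restores the proto2 default of {@code 0L}; the timestamp
* literal is an assumption for the example:
* <pre>{@code
* ReservationIdProto.Builder b = ReservationIdProto.newBuilder()
*     .setClusterTimestamp(1700000000000L);
* assert b.hasClusterTimestamp();
* b.clearClusterTimestamp();             // drops the presence bit...
* assert !b.hasClusterTimestamp();
* assert b.getClusterTimestamp() == 0L;  // ...and reverts to the default
* }</pre>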
*/ @java.lang.Override public long getClusterTimestamp() { return clusterTimestamp_; } /** * optional int64 cluster_timestamp = 2; * @param value The clusterTimestamp to set. * @return This builder for chaining. */ public Builder setClusterTimestamp(long value) { clusterTimestamp_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional int64 cluster_timestamp = 2; * @return This builder for chaining. */ public Builder clearClusterTimestamp() { bitField0_ = (bitField0_ & ~0x00000002); clusterTimestamp_ = 0L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationIdProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationIdProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ReservationIdProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ReservationRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationRequestProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ResourceProto capability = 1; * @return Whether the capability field is set. */ boolean hasCapability(); /** * optional .hadoop.yarn.ResourceProto capability = 1; * @return The capability. 
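* <p>Editor's note: illustrative only, not generated. For a message-typed
* optional field such as {@code capability}, the getter never returns
* {@code null}: when the field is unset it returns
* {@code ResourceProto.getDefaultInstance()}. Callers that care about
* presence should therefore consult {@code hasCapability()} first
* ({@code req} below is an assumed {@code ReservationRequestProto} instance):
* <pre>{@code
* if (req.hasCapability()) {
*   ResourceProto cap = req.getCapability();  // explicitly set value
* }
* // Even when unset, req.getCapability() is safe and yields the default instance.
* }</pre>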
*/ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability(); /** * optional .hadoop.yarn.ResourceProto capability = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder(); /** * optional int32 num_containers = 2 [default = 1]; * @return Whether the numContainers field is set. */ boolean hasNumContainers(); /** * optional int32 num_containers = 2 [default = 1]; * @return The numContainers. */ int getNumContainers(); /** * optional int32 concurrency = 3 [default = 1]; * @return Whether the concurrency field is set. */ boolean hasConcurrency(); /** * optional int32 concurrency = 3 [default = 1]; * @return The concurrency. */ int getConcurrency(); /** * optional int64 duration = 4 [default = -1]; * @return Whether the duration field is set. */ boolean hasDuration(); /** * optional int64 duration = 4 [default = -1]; * @return The duration. */ long getDuration(); } /** * Protobuf type {@code hadoop.yarn.ReservationRequestProto} */ public static final class ReservationRequestProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationRequestProto) ReservationRequestProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ReservationRequestProto.newBuilder() to construct. private ReservationRequestProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ReservationRequestProto() { numContainers_ = 1; concurrency_ = 1; duration_ = -1L; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ReservationRequestProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder.class); } private int bitField0_; public static final int CAPABILITY_FIELD_NUMBER = 1; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_; /** * optional .hadoop.yarn.ResourceProto capability = 1; * @return Whether the capability field is set. */ @java.lang.Override public boolean hasCapability() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ResourceProto capability = 1; * @return The capability. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() { return capability_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } /** * optional .hadoop.yarn.ResourceProto capability = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() { return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } public static final int NUM_CONTAINERS_FIELD_NUMBER = 2; private int numContainers_ = 1; /** * optional int32 num_containers = 2 [default = 1]; * @return Whether the numContainers field is set. */ @java.lang.Override public boolean hasNumContainers() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int32 num_containers = 2 [default = 1]; * @return The numContainers. */ @java.lang.Override public int getNumContainers() { return numContainers_; } public static final int CONCURRENCY_FIELD_NUMBER = 3; private int concurrency_ = 1; /** * optional int32 concurrency = 3 [default = 1]; * @return Whether the concurrency field is set. */ @java.lang.Override public boolean hasConcurrency() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int32 concurrency = 3 [default = 1]; * @return The concurrency. */ @java.lang.Override public int getConcurrency() { return concurrency_; } public static final int DURATION_FIELD_NUMBER = 4; private long duration_ = -1L; /** * optional int64 duration = 4 [default = -1]; * @return Whether the duration field is set. */ @java.lang.Override public boolean hasDuration() { return ((bitField0_ & 0x00000008) != 0); } /** * optional int64 duration = 4 [default = -1]; * @return The duration. */ @java.lang.Override public long getDuration() { return duration_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasCapability()) { if (!getCapability().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getCapability()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt32(2, numContainers_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeInt32(3, concurrency_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeInt64(4, duration_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getCapability()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(2, numContainers_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(3, concurrency_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(4, duration_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto) obj; if (hasCapability() != other.hasCapability()) return false; if (hasCapability()) { if (!getCapability() .equals(other.getCapability())) return false; } if (hasNumContainers() != other.hasNumContainers()) return false; if (hasNumContainers()) { if (getNumContainers() != other.getNumContainers()) return false; } if (hasConcurrency() != other.hasConcurrency()) return false; if (hasConcurrency()) { if (getConcurrency() != other.getConcurrency()) return false; } if (hasDuration() != other.hasDuration()) return false; if (hasDuration()) { if (getDuration() != other.getDuration()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasCapability()) { hash = (37 * hash) + CAPABILITY_FIELD_NUMBER; hash = (53 * hash) + getCapability().hashCode(); } if (hasNumContainers()) { hash = (37 * hash) + NUM_CONTAINERS_FIELD_NUMBER; hash = (53 * hash) + getNumContainers(); } if (hasConcurrency()) { hash = (37 * hash) + CONCURRENCY_FIELD_NUMBER; hash = (53 * hash) + getConcurrency(); } if (hasDuration()) { hash = (37 * hash) + DURATION_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getDuration()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public 
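/*
 * Editor's note (illustrative, not generated): the parseFrom overloads in
 * this family accept the serialized form produced by the message itself,
 * so a round trip preserves equality. 'req' is an assumed existing
 * ReservationRequestProto instance; toByteArray() is inherited from the
 * protobuf runtime's AbstractMessageLite:
 *
 *   byte[] bytes = req.toByteArray();
 *   ReservationRequestProto copy = ReservationRequestProto.parseFrom(bytes);
 *   assert copy.equals(req);
 */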
static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ReservationRequestProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationRequestProto) org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getCapabilityFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; capability_ = null; if (capabilityBuilder_ != null) { capabilityBuilder_.dispose(); capabilityBuilder_ = null; } numContainers_ = 1; concurrency_ = 1; duration_ = -1L; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.capability_ = capabilityBuilder_ == null ? 
capability_ : capabilityBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.numContainers_ = numContainers_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.concurrency_ = concurrency_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.duration_ = duration_; to_bitField0_ |= 0x00000008; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.getDefaultInstance()) return this; if (other.hasCapability()) { mergeCapability(other.getCapability()); } if (other.hasNumContainers()) { setNumContainers(other.getNumContainers()); } if (other.hasConcurrency()) { setConcurrency(other.getConcurrency()); } if (other.hasDuration()) { setDuration(other.getDuration()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasCapability()) { if (!getCapability().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getCapabilityFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 16: { numContainers_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 24: { concurrency_ = input.readInt32(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { duration_ = input.readInt64(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch 
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> capabilityBuilder_; /** * optional .hadoop.yarn.ResourceProto capability = 1; * @return Whether the capability field is set. */ public boolean hasCapability() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ResourceProto capability = 1; * @return The capability. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() { if (capabilityBuilder_ == null) { return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } else { return capabilityBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceProto capability = 1; */ public Builder setCapability(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (capabilityBuilder_ == null) { if (value == null) { throw new NullPointerException(); } capability_ = value; } else { capabilityBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto capability = 1; */ public Builder setCapability( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) { if (capabilityBuilder_ == null) { capability_ = builderForValue.build(); } else { capabilityBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto capability = 1; */ public Builder mergeCapability(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (capabilityBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && capability_ != null && capability_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) { getCapabilityBuilder().mergeFrom(value); } else { capability_ = value; } } else { capabilityBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto capability = 1; */ public Builder clearCapability() { bitField0_ = (bitField0_ & ~0x00000001); capability_ = null; if (capabilityBuilder_ != null) { capabilityBuilder_.dispose(); capabilityBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto capability = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getCapabilityBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCapabilityFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceProto capability = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() { if (capabilityBuilder_ != null) { return capabilityBuilder_.getMessageOrBuilder(); } else { return capability_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } } /** * optional .hadoop.yarn.ResourceProto capability = 1; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getCapabilityFieldBuilder() { if (capabilityBuilder_ == null) { capabilityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>( getCapability(), getParentForChildren(), isClean()); capability_ = null; } return capabilityBuilder_; } private int numContainers_ = 1; /** * optional int32 num_containers = 2 [default = 1]; * @return Whether the numContainers field is set. */ @java.lang.Override public boolean hasNumContainers() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int32 num_containers = 2 [default = 1]; * @return The numContainers. */ @java.lang.Override public int getNumContainers() { return numContainers_; } /** * optional int32 num_containers = 2 [default = 1]; * @param value The numContainers to set. * @return This builder for chaining. */ public Builder setNumContainers(int value) { numContainers_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional int32 num_containers = 2 [default = 1]; * @return This builder for chaining. */ public Builder clearNumContainers() { bitField0_ = (bitField0_ & ~0x00000002); numContainers_ = 1; onChanged(); return this; } private int concurrency_ = 1; /** * optional int32 concurrency = 3 [default = 1]; * @return Whether the concurrency field is set. */ @java.lang.Override public boolean hasConcurrency() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int32 concurrency = 3 [default = 1]; * @return The concurrency. */ @java.lang.Override public int getConcurrency() { return concurrency_; } /** * optional int32 concurrency = 3 [default = 1]; * @param value The concurrency to set. * @return This builder for chaining. */ public Builder setConcurrency(int value) { concurrency_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional int32 concurrency = 3 [default = 1]; * @return This builder for chaining. */ public Builder clearConcurrency() { bitField0_ = (bitField0_ & ~0x00000004); concurrency_ = 1; onChanged(); return this; } private long duration_ = -1L; /** * optional int64 duration = 4 [default = -1]; * @return Whether the duration field is set. */ @java.lang.Override public boolean hasDuration() { return ((bitField0_ & 0x00000008) != 0); } /** * optional int64 duration = 4 [default = -1]; * @return The duration. */ @java.lang.Override public long getDuration() { return duration_; } /** * optional int64 duration = 4 [default = -1]; * @param value The duration to set. * @return This builder for chaining. */ public Builder setDuration(long value) { duration_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional int64 duration = 4 [default = -1]; * @return This builder for chaining. 
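* <p>Editor's note: a sketch, not generated code, of how this message's
* non-zero proto2 defaults behave. {@code duration} defaults to {@code -1}
* and {@code num_containers}/{@code concurrency} default to {@code 1}, so
* clearing a field restores those declared defaults rather than zero; the
* duration literal is an assumption for the example:
* <pre>{@code
* ReservationRequestProto.Builder rb = ReservationRequestProto.newBuilder()
*     .setDuration(3600000L);
* rb.clearDuration();
* assert rb.getDuration() == -1L;     // [default = -1]
* assert rb.getNumContainers() == 1;  // [default = 1], never set
* }</pre>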
*/ public Builder clearDuration() { bitField0_ = (bitField0_ & ~0x00000008); duration_ = -1L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationRequestProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationRequestProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ReservationRequestProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ReservationRequestsProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationRequestsProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ java.util.List getReservationResourcesList(); /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto getReservationResources(int index); /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ int getReservationResourcesCount(); /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ java.util.List getReservationResourcesOrBuilderList(); /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProtoOrBuilder 
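/*
 * Editor's note (illustrative, not generated): repeated message fields such
 * as reservation_resources expose list, count, and indexed accessors.
 * 'reqs' below is an assumed ReservationRequestsProto instance:
 *
 *   for (int i = 0; i < reqs.getReservationResourcesCount(); i++) {
 *     ReservationRequestProto r = reqs.getReservationResources(i);
 *   }
 */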
getReservationResourcesOrBuilder( int index); /** * optional .hadoop.yarn.ReservationRequestInterpreterProto interpreter = 2 [default = R_ALL]; * @return Whether the interpreter field is set. */ boolean hasInterpreter(); /** * optional .hadoop.yarn.ReservationRequestInterpreterProto interpreter = 2 [default = R_ALL]; * @return The interpreter. */ org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto getInterpreter(); } /** * Protobuf type {@code hadoop.yarn.ReservationRequestsProto} */ public static final class ReservationRequestsProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationRequestsProto) ReservationRequestsProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ReservationRequestsProto.newBuilder() to construct. private ReservationRequestsProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ReservationRequestsProto() { reservationResources_ = java.util.Collections.emptyList(); interpreter_ = 1; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ReservationRequestsProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestsProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestsProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.Builder.class); } private int bitField0_; public static final int RESERVATION_RESOURCES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List reservationResources_; /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ @java.lang.Override public java.util.List getReservationResourcesList() { return reservationResources_; } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ @java.lang.Override public java.util.List getReservationResourcesOrBuilderList() { return reservationResources_; } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ @java.lang.Override public int getReservationResourcesCount() { return reservationResources_.size(); } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto getReservationResources(int index) { return reservationResources_.get(index); } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProtoOrBuilder getReservationResourcesOrBuilder( int index) { return reservationResources_.get(index); } public static final int INTERPRETER_FIELD_NUMBER = 2; private int 
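/*
 * Editor's note (illustrative, not generated): this enum-typed field is
 * stored as its raw wire number. getInterpreter() maps the number back via
 * ReservationRequestInterpreterProto.forNumber(...) and falls back to the
 * declared default R_ALL when the stored number is unrecognized; during
 * parsing, unknown enum numbers are routed to the unknown-field set instead.
 */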
interpreter_ = 1; /** * optional .hadoop.yarn.ReservationRequestInterpreterProto interpreter = 2 [default = R_ALL]; * @return Whether the interpreter field is set. */ @java.lang.Override public boolean hasInterpreter() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ReservationRequestInterpreterProto interpreter = 2 [default = R_ALL]; * @return The interpreter. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto getInterpreter() { org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto result = org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto.forNumber(interpreter_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto.R_ALL : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getReservationResourcesCount(); i++) { if (!getReservationResources(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < reservationResources_.size(); i++) { output.writeMessage(1, reservationResources_.get(i)); } if (((bitField0_ & 0x00000001) != 0)) { output.writeEnum(2, interpreter_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < reservationResources_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, reservationResources_.get(i)); } if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(2, interpreter_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto) obj; if (!getReservationResourcesList() .equals(other.getReservationResourcesList())) return false; if (hasInterpreter() != other.hasInterpreter()) return false; if (hasInterpreter()) { if (interpreter_ != other.interpreter_) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getReservationResourcesCount() > 0) { hash = (37 * hash) + RESERVATION_RESOURCES_FIELD_NUMBER; hash = (53 * hash) + getReservationResourcesList().hashCode(); } if (hasInterpreter()) { hash = (37 * hash) + INTERPRETER_FIELD_NUMBER; hash = (53 * hash) + interpreter_; } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom( java.nio.ByteBuffer data) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, 
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ReservationRequestsProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationRequestsProto) org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestsProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestsProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (reservationResourcesBuilder_ == null) { reservationResources_ = java.util.Collections.emptyList(); } else { reservationResources_ = null; reservationResourcesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); interpreter_ = 1; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestsProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto result = 
new org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto result) { if (reservationResourcesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { reservationResources_ = java.util.Collections.unmodifiableList(reservationResources_); bitField0_ = (bitField0_ & ~0x00000001); } result.reservationResources_ = reservationResources_; } else { result.reservationResources_ = reservationResourcesBuilder_.build(); } } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.interpreter_ = interpreter_; to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.getDefaultInstance()) return this; if (reservationResourcesBuilder_ == null) { if (!other.reservationResources_.isEmpty()) { if (reservationResources_.isEmpty()) { reservationResources_ = other.reservationResources_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureReservationResourcesIsMutable(); reservationResources_.addAll(other.reservationResources_); } onChanged(); } } else { if (!other.reservationResources_.isEmpty()) { if (reservationResourcesBuilder_.isEmpty()) { reservationResourcesBuilder_.dispose(); reservationResourcesBuilder_ = null; reservationResources_ = other.reservationResources_; bitField0_ = (bitField0_ & ~0x00000001); reservationResourcesBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getReservationResourcesFieldBuilder() : null; } else { reservationResourcesBuilder_.addAllMessages(other.reservationResources_); } } } if (other.hasInterpreter()) { setInterpreter(other.getInterpreter()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { for (int i = 0; i < getReservationResourcesCount(); i++) { if (!getReservationResources(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.PARSER, extensionRegistry); if (reservationResourcesBuilder_ == null) { ensureReservationResourcesIsMutable(); reservationResources_.add(m); } else { reservationResourcesBuilder_.addMessage(m); } break; } // case 10 case 16: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(2, tmpRaw); } else { interpreter_ = tmpRaw; bitField0_ |= 0x00000002; } break; } // case 16 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List reservationResources_ = java.util.Collections.emptyList(); private void ensureReservationResourcesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { reservationResources_ = new java.util.ArrayList(reservationResources_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProtoOrBuilder> reservationResourcesBuilder_; /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public java.util.List getReservationResourcesList() { if (reservationResourcesBuilder_ == null) { return java.util.Collections.unmodifiableList(reservationResources_); } else { return reservationResourcesBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public int getReservationResourcesCount() { if (reservationResourcesBuilder_ == null) { return reservationResources_.size(); } else { return reservationResourcesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto getReservationResources(int index) { if (reservationResourcesBuilder_ == null) { return reservationResources_.get(index); } else { return 
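      /*
       * The case labels in the mergeFrom switch above follow the protobuf
       * wire format: tag = (field_number << 3) | wire_type. Field 1
       * (reservation_resources) is length-delimited (wire type 2), so
       * (1 << 3) | 2 = 10; field 2 (interpreter) is a varint enum (wire
       * type 0), so (2 << 3) | 0 = 16. Tag 0 marks end of input, and enum
       * numbers this version does not know are kept as unknown varints.
       */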
reservationResourcesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public Builder setReservationResources( int index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto value) { if (reservationResourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReservationResourcesIsMutable(); reservationResources_.set(index, value); onChanged(); } else { reservationResourcesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public Builder setReservationResources( int index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder builderForValue) { if (reservationResourcesBuilder_ == null) { ensureReservationResourcesIsMutable(); reservationResources_.set(index, builderForValue.build()); onChanged(); } else { reservationResourcesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public Builder addReservationResources(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto value) { if (reservationResourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReservationResourcesIsMutable(); reservationResources_.add(value); onChanged(); } else { reservationResourcesBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public Builder addReservationResources( int index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto value) { if (reservationResourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReservationResourcesIsMutable(); reservationResources_.add(index, value); onChanged(); } else { reservationResourcesBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public Builder addReservationResources( org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder builderForValue) { if (reservationResourcesBuilder_ == null) { ensureReservationResourcesIsMutable(); reservationResources_.add(builderForValue.build()); onChanged(); } else { reservationResourcesBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public Builder addReservationResources( int index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder builderForValue) { if (reservationResourcesBuilder_ == null) { ensureReservationResourcesIsMutable(); reservationResources_.add(index, builderForValue.build()); onChanged(); } else { reservationResourcesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public Builder addAllReservationResources( java.lang.Iterable values) { if (reservationResourcesBuilder_ == null) { ensureReservationResourcesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, reservationResources_); onChanged(); } else { reservationResourcesBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public Builder clearReservationResources() { if (reservationResourcesBuilder_ == null) { reservationResources_ = 
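      /*
       * Sketch of the repeated-field mutators above (the element values are
       * illustrative defaults; unshaded class names assumed):
       *
       *   YarnProtos.ReservationRequestsProto.Builder b =
       *       YarnProtos.ReservationRequestsProto.newBuilder();
       *   b.addReservationResources(
       *       YarnProtos.ReservationRequestProto.getDefaultInstance());
       *   b.addAllReservationResources(java.util.Arrays.asList(
       *       YarnProtos.ReservationRequestProto.getDefaultInstance()));
       *   b.setReservationResources(0,
       *       YarnProtos.ReservationRequestProto.getDefaultInstance());
       */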
java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { reservationResourcesBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public Builder removeReservationResources(int index) { if (reservationResourcesBuilder_ == null) { ensureReservationResourcesIsMutable(); reservationResources_.remove(index); onChanged(); } else { reservationResourcesBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder getReservationResourcesBuilder( int index) { return getReservationResourcesFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProtoOrBuilder getReservationResourcesOrBuilder( int index) { if (reservationResourcesBuilder_ == null) { return reservationResources_.get(index); } else { return reservationResourcesBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public java.util.List getReservationResourcesOrBuilderList() { if (reservationResourcesBuilder_ != null) { return reservationResourcesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(reservationResources_); } } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder addReservationResourcesBuilder() { return getReservationResourcesFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder addReservationResourcesBuilder( int index) { return getReservationResourcesFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1; */ public java.util.List getReservationResourcesBuilderList() { return getReservationResourcesFieldBuilder().getBuilderList(); } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProtoOrBuilder> getReservationResourcesFieldBuilder() { if (reservationResourcesBuilder_ == null) { reservationResourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProtoOrBuilder>( reservationResources_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); reservationResources_ = null; } return reservationResourcesBuilder_; } private int interpreter_ = 1; /** * optional .hadoop.yarn.ReservationRequestInterpreterProto interpreter = 2 [default = R_ALL]; * @return Whether the interpreter field is set. 
       */
      @java.lang.Override public boolean hasInterpreter() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * optional .hadoop.yarn.ReservationRequestInterpreterProto interpreter = 2 [default = R_ALL];
       * @return The interpreter.
       */
      @java.lang.Override
      public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto getInterpreter() {
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto result = org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto.forNumber(interpreter_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto.R_ALL : result;
      }
      /**
       * optional .hadoop.yarn.ReservationRequestInterpreterProto interpreter = 2 [default = R_ALL];
       * @param value The interpreter to set.
       * @return This builder for chaining.
       */
      public Builder setInterpreter(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        interpreter_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * optional .hadoop.yarn.ReservationRequestInterpreterProto interpreter = 2 [default = R_ALL];
       * @return This builder for chaining.
       */
      public Builder clearInterpreter() {
        bitField0_ = (bitField0_ & ~0x00000002);
        interpreter_ = 1;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationRequestsProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationRequestsProto)
    private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto();
    }
    public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
    @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER
        = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
      @java.lang.Override
      public ReservationRequestsProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
    public static
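    /*
     * Sketch of the optional-with-default semantics implemented above:
     * interpreter_ is stored as its wire number (1, i.e. R_ALL), and
     * hasInterpreter() reports true only after an explicit set.
     *
     *   YarnProtos.ReservationRequestsProto empty =
     *       YarnProtos.ReservationRequestsProto.getDefaultInstance();
     *   assert !empty.hasInterpreter();
     *   assert empty.getInterpreter()
     *       == YarnProtos.ReservationRequestInterpreterProto.R_ALL;
     */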
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ReservationDefinitionProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationDefinitionProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1; * @return Whether the reservationRequests field is set. */ boolean hasReservationRequests(); /** * optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1; * @return The reservationRequests. */ org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto getReservationRequests(); /** * optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1; */ org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProtoOrBuilder getReservationRequestsOrBuilder(); /** * optional int64 arrival = 2; * @return Whether the arrival field is set. */ boolean hasArrival(); /** * optional int64 arrival = 2; * @return The arrival. */ long getArrival(); /** * optional int64 deadline = 3; * @return Whether the deadline field is set. */ boolean hasDeadline(); /** * optional int64 deadline = 3; * @return The deadline. */ long getDeadline(); /** * optional string reservation_name = 4; * @return Whether the reservationName field is set. */ boolean hasReservationName(); /** * optional string reservation_name = 4; * @return The reservationName. */ java.lang.String getReservationName(); /** * optional string reservation_name = 4; * @return The bytes for reservationName. */ org.apache.hadoop.thirdparty.protobuf.ByteString getReservationNameBytes(); /** * optional string recurrence_expression = 5 [default = "0"]; * @return Whether the recurrenceExpression field is set. */ boolean hasRecurrenceExpression(); /** * optional string recurrence_expression = 5 [default = "0"]; * @return The recurrenceExpression. */ java.lang.String getRecurrenceExpression(); /** * optional string recurrence_expression = 5 [default = "0"]; * @return The bytes for recurrenceExpression. */ org.apache.hadoop.thirdparty.protobuf.ByteString getRecurrenceExpressionBytes(); /** * optional .hadoop.yarn.PriorityProto priority = 6; * @return Whether the priority field is set. */ boolean hasPriority(); /** * optional .hadoop.yarn.PriorityProto priority = 6; * @return The priority. */ org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority(); /** * optional .hadoop.yarn.PriorityProto priority = 6; */ org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.ReservationDefinitionProto} */ public static final class ReservationDefinitionProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationDefinitionProto) ReservationDefinitionProtoOrBuilder { private static final long serialVersionUID = 0L; // Use ReservationDefinitionProto.newBuilder() to construct. 
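  /*
   * Sketch of assembling the message declared by the OrBuilder interface
   * above (millisecond timestamps are illustrative; recurrence_expression
   * defaults to "0", which YARN treats as a non-recurring reservation):
   *
   *   YarnProtos.ReservationDefinitionProto def =
   *       YarnProtos.ReservationDefinitionProto.newBuilder()
   *           .setReservationName("nightly-etl")
   *           .setArrival(1700000000000L)
   *           .setDeadline(1700003600000L)
   *           .build();
   */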
private ReservationDefinitionProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ReservationDefinitionProto() { reservationName_ = ""; recurrenceExpression_ = "0"; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ReservationDefinitionProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationDefinitionProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationDefinitionProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder.class); } private int bitField0_; public static final int RESERVATION_REQUESTS_FIELD_NUMBER = 1; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto reservationRequests_; /** * optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1; * @return Whether the reservationRequests field is set. */ @java.lang.Override public boolean hasReservationRequests() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1; * @return The reservationRequests. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto getReservationRequests() { return reservationRequests_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.getDefaultInstance() : reservationRequests_; } /** * optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProtoOrBuilder getReservationRequestsOrBuilder() { return reservationRequests_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.getDefaultInstance() : reservationRequests_; } public static final int ARRIVAL_FIELD_NUMBER = 2; private long arrival_ = 0L; /** * optional int64 arrival = 2; * @return Whether the arrival field is set. */ @java.lang.Override public boolean hasArrival() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int64 arrival = 2; * @return The arrival. */ @java.lang.Override public long getArrival() { return arrival_; } public static final int DEADLINE_FIELD_NUMBER = 3; private long deadline_ = 0L; /** * optional int64 deadline = 3; * @return Whether the deadline field is set. */ @java.lang.Override public boolean hasDeadline() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int64 deadline = 3; * @return The deadline. */ @java.lang.Override public long getDeadline() { return deadline_; } public static final int RESERVATION_NAME_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object reservationName_ = ""; /** * optional string reservation_name = 4; * @return Whether the reservationName field is set. 
*/ @java.lang.Override public boolean hasReservationName() { return ((bitField0_ & 0x00000008) != 0); } /** * optional string reservation_name = 4; * @return The reservationName. */ @java.lang.Override public java.lang.String getReservationName() { java.lang.Object ref = reservationName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { reservationName_ = s; } return s; } } /** * optional string reservation_name = 4; * @return The bytes for reservationName. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getReservationNameBytes() { java.lang.Object ref = reservationName_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); reservationName_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int RECURRENCE_EXPRESSION_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object recurrenceExpression_ = "0"; /** * optional string recurrence_expression = 5 [default = "0"]; * @return Whether the recurrenceExpression field is set. */ @java.lang.Override public boolean hasRecurrenceExpression() { return ((bitField0_ & 0x00000010) != 0); } /** * optional string recurrence_expression = 5 [default = "0"]; * @return The recurrenceExpression. */ @java.lang.Override public java.lang.String getRecurrenceExpression() { java.lang.Object ref = recurrenceExpression_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { recurrenceExpression_ = s; } return s; } } /** * optional string recurrence_expression = 5 [default = "0"]; * @return The bytes for recurrenceExpression. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getRecurrenceExpressionBytes() { java.lang.Object ref = recurrenceExpression_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); recurrenceExpression_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int PRIORITY_FIELD_NUMBER = 6; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_; /** * optional .hadoop.yarn.PriorityProto priority = 6; * @return Whether the priority field is set. */ @java.lang.Override public boolean hasPriority() { return ((bitField0_ & 0x00000020) != 0); } /** * optional .hadoop.yarn.PriorityProto priority = 6; * @return The priority. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() { return priority_ == null ? 
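    /*
     * Note on the string accessors above: reservation_name and
     * recurrence_expression are stored as either java.lang.String or
     * ByteString. The getter decodes lazily and caches the decoded String
     * back into the field only when the bytes are valid UTF-8, so a
     * malformed payload keeps returning a freshly decoded String (with
     * replacement characters) while the raw bytes stay intact for
     * getReservationNameBytes().
     */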
org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } /** * optional .hadoop.yarn.PriorityProto priority = 6; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() { return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasReservationRequests()) { if (!getReservationRequests().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getReservationRequests()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt64(2, arrival_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeInt64(3, deadline_); } if (((bitField0_ & 0x00000008) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, reservationName_); } if (((bitField0_ & 0x00000010) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, recurrenceExpression_); } if (((bitField0_ & 0x00000020) != 0)) { output.writeMessage(6, getPriority()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getReservationRequests()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(2, arrival_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(3, deadline_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, reservationName_); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, recurrenceExpression_); } if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(6, getPriority()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto) obj; if (hasReservationRequests() != other.hasReservationRequests()) return false; if (hasReservationRequests()) { if (!getReservationRequests() .equals(other.getReservationRequests())) return false; } if (hasArrival() != other.hasArrival()) return false; if (hasArrival()) { if (getArrival() != other.getArrival()) return false; } if (hasDeadline() != other.hasDeadline()) return false; if (hasDeadline()) { if (getDeadline() != other.getDeadline()) return false; } if 
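    /*
     * Sketch: writeTo and getSerializedSize above are the primitives behind
     * toByteArray(); only fields whose presence bit in bitField0_ is set are
     * emitted. The unshaded thirdparty package name is an assumption.
     *
     *   YarnProtos.ReservationDefinitionProto def =
     *       YarnProtos.ReservationDefinitionProto.newBuilder()
     *           .setReservationName("sketch").build();
     *   byte[] out = new byte[def.getSerializedSize()];
     *   org.apache.hadoop.thirdparty.protobuf.CodedOutputStream cos =
     *       org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.newInstance(out);
     *   def.writeTo(cos);
     *   cos.checkNoSpaceLeft();
     */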
(hasReservationName() != other.hasReservationName()) return false; if (hasReservationName()) { if (!getReservationName() .equals(other.getReservationName())) return false; } if (hasRecurrenceExpression() != other.hasRecurrenceExpression()) return false; if (hasRecurrenceExpression()) { if (!getRecurrenceExpression() .equals(other.getRecurrenceExpression())) return false; } if (hasPriority() != other.hasPriority()) return false; if (hasPriority()) { if (!getPriority() .equals(other.getPriority())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasReservationRequests()) { hash = (37 * hash) + RESERVATION_REQUESTS_FIELD_NUMBER; hash = (53 * hash) + getReservationRequests().hashCode(); } if (hasArrival()) { hash = (37 * hash) + ARRIVAL_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getArrival()); } if (hasDeadline()) { hash = (37 * hash) + DEADLINE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getDeadline()); } if (hasReservationName()) { hash = (37 * hash) + RESERVATION_NAME_FIELD_NUMBER; hash = (53 * hash) + getReservationName().hashCode(); } if (hasRecurrenceExpression()) { hash = (37 * hash) + RECURRENCE_EXPRESSION_FIELD_NUMBER; hash = (53 * hash) + getRecurrenceExpression().hashCode(); } if (hasPriority()) { hash = (37 * hash) + PRIORITY_FIELD_NUMBER; hash = (53 * hash) + getPriority().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ReservationDefinitionProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationDefinitionProto) org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationDefinitionProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationDefinitionProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getReservationRequestsFieldBuilder(); getPriorityFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; reservationRequests_ = null; if (reservationRequestsBuilder_ != null) { reservationRequestsBuilder_.dispose(); reservationRequestsBuilder_ = null; } arrival_ = 0L; deadline_ = 0L; reservationName_ = ""; recurrenceExpression_ = "0"; priority_ = null; if (priorityBuilder_ != null) { priorityBuilder_.dispose(); priorityBuilder_ = null; } return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationDefinitionProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void 
buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.reservationRequests_ = reservationRequestsBuilder_ == null ? reservationRequests_ : reservationRequestsBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.arrival_ = arrival_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.deadline_ = deadline_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.reservationName_ = reservationName_; to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.recurrenceExpression_ = recurrenceExpression_; to_bitField0_ |= 0x00000010; } if (((from_bitField0_ & 0x00000020) != 0)) { result.priority_ = priorityBuilder_ == null ? priority_ : priorityBuilder_.build(); to_bitField0_ |= 0x00000020; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance()) return this; if (other.hasReservationRequests()) { mergeReservationRequests(other.getReservationRequests()); } if (other.hasArrival()) { setArrival(other.getArrival()); } if (other.hasDeadline()) { setDeadline(other.getDeadline()); } if (other.hasReservationName()) { reservationName_ = other.reservationName_; bitField0_ |= 0x00000008; onChanged(); } if (other.hasRecurrenceExpression()) { recurrenceExpression_ = other.recurrenceExpression_; bitField0_ |= 0x00000010; onChanged(); } if (other.hasPriority()) { mergePriority(other.getPriority()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasReservationRequests()) { if (!getReservationRequests().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, 
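      /*
       * The mergeFrom(other) contract implemented above: scalar and string
       * fields that are set in the other message overwrite this builder's
       * values, while message fields (reservation_requests, priority) are
       * merged recursively rather than replaced.
       *
       *   YarnProtos.ReservationDefinitionProto merged =
       *       base.toBuilder().mergeFrom(patch).build();
       *
       * (base and patch are illustrative names.)
       */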
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getReservationRequestsFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 16: { arrival_ = input.readInt64(); bitField0_ |= 0x00000002; break; } // case 16 case 24: { deadline_ = input.readInt64(); bitField0_ |= 0x00000004; break; } // case 24 case 34: { reservationName_ = input.readBytes(); bitField0_ |= 0x00000008; break; } // case 34 case 42: { recurrenceExpression_ = input.readBytes(); bitField0_ |= 0x00000010; break; } // case 42 case 50: { input.readMessage( getPriorityFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000020; break; } // case 50 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto reservationRequests_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProtoOrBuilder> reservationRequestsBuilder_; /** * optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1; * @return Whether the reservationRequests field is set. */ public boolean hasReservationRequests() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1; * @return The reservationRequests. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto getReservationRequests() { if (reservationRequestsBuilder_ == null) { return reservationRequests_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.getDefaultInstance() : reservationRequests_; } else { return reservationRequestsBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1; */ public Builder setReservationRequests(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto value) { if (reservationRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } reservationRequests_ = value; } else { reservationRequestsBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1; */ public Builder setReservationRequests( org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.Builder builderForValue) { if (reservationRequestsBuilder_ == null) { reservationRequests_ = builderForValue.build(); } else { reservationRequestsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1; */ public Builder mergeReservationRequests(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto value) { if (reservationRequestsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && reservationRequests_ != null && reservationRequests_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.getDefaultInstance()) { getReservationRequestsBuilder().mergeFrom(value); } else { reservationRequests_ = value; } } else { reservationRequestsBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1; */ public Builder clearReservationRequests() { bitField0_ = (bitField0_ & ~0x00000001); reservationRequests_ = null; if (reservationRequestsBuilder_ != null) { reservationRequestsBuilder_.dispose(); reservationRequestsBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.Builder getReservationRequestsBuilder() { bitField0_ |= 0x00000001; onChanged(); return getReservationRequestsFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProtoOrBuilder getReservationRequestsOrBuilder() { if (reservationRequestsBuilder_ != null) { return reservationRequestsBuilder_.getMessageOrBuilder(); } else { return reservationRequests_ == null ? 
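      /*
       * Sketch: getReservationRequestsBuilder() above edits the nested
       * message in place, avoiding a separate build-and-set step:
       *
       *   YarnProtos.ReservationDefinitionProto.Builder db =
       *       YarnProtos.ReservationDefinitionProto.newBuilder();
       *   db.getReservationRequestsBuilder()
       *       .setInterpreter(
       *           YarnProtos.ReservationRequestInterpreterProto.R_ALL);
       *   YarnProtos.ReservationDefinitionProto def = db.build();
       */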
org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.getDefaultInstance() : reservationRequests_; } } /** * optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProtoOrBuilder> getReservationRequestsFieldBuilder() { if (reservationRequestsBuilder_ == null) { reservationRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProtoOrBuilder>( getReservationRequests(), getParentForChildren(), isClean()); reservationRequests_ = null; } return reservationRequestsBuilder_; } private long arrival_ ; /** * optional int64 arrival = 2; * @return Whether the arrival field is set. */ @java.lang.Override public boolean hasArrival() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int64 arrival = 2; * @return The arrival. */ @java.lang.Override public long getArrival() { return arrival_; } /** * optional int64 arrival = 2; * @param value The arrival to set. * @return This builder for chaining. */ public Builder setArrival(long value) { arrival_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional int64 arrival = 2; * @return This builder for chaining. */ public Builder clearArrival() { bitField0_ = (bitField0_ & ~0x00000002); arrival_ = 0L; onChanged(); return this; } private long deadline_ ; /** * optional int64 deadline = 3; * @return Whether the deadline field is set. */ @java.lang.Override public boolean hasDeadline() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int64 deadline = 3; * @return The deadline. */ @java.lang.Override public long getDeadline() { return deadline_; } /** * optional int64 deadline = 3; * @param value The deadline to set. * @return This builder for chaining. */ public Builder setDeadline(long value) { deadline_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional int64 deadline = 3; * @return This builder for chaining. */ public Builder clearDeadline() { bitField0_ = (bitField0_ & ~0x00000004); deadline_ = 0L; onChanged(); return this; } private java.lang.Object reservationName_ = ""; /** * optional string reservation_name = 4; * @return Whether the reservationName field is set. */ public boolean hasReservationName() { return ((bitField0_ & 0x00000008) != 0); } /** * optional string reservation_name = 4; * @return The reservationName. */ public java.lang.String getReservationName() { java.lang.Object ref = reservationName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { reservationName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string reservation_name = 4; * @return The bytes for reservationName. 
 */
public org.apache.hadoop.thirdparty.protobuf.ByteString getReservationNameBytes() {
  java.lang.Object ref = reservationName_;
  if (ref instanceof String) {
    org.apache.hadoop.thirdparty.protobuf.ByteString b =
        org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    reservationName_ = b;
    return b;
  } else {
    return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
  }
}
/** optional string reservation_name = 4; @param value The reservationName to set. @return This builder for chaining. */
public Builder setReservationName(java.lang.String value) {
  if (value == null) { throw new NullPointerException(); }
  reservationName_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
/** optional string reservation_name = 4; @return This builder for chaining. */
public Builder clearReservationName() {
  reservationName_ = getDefaultInstance().getReservationName();
  bitField0_ = (bitField0_ & ~0x00000008);
  onChanged();
  return this;
}
/** optional string reservation_name = 4; @param value The bytes for reservationName to set. @return This builder for chaining. */
public Builder setReservationNameBytes(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
  if (value == null) { throw new NullPointerException(); }
  reservationName_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}

private java.lang.Object recurrenceExpression_ = "0";
/** optional string recurrence_expression = 5 [default = "0"]; @return Whether the recurrenceExpression field is set. */
public boolean hasRecurrenceExpression() {
  return ((bitField0_ & 0x00000010) != 0);
}
/** optional string recurrence_expression = 5 [default = "0"]; @return The recurrenceExpression. */
public java.lang.String getRecurrenceExpression() {
  java.lang.Object ref = recurrenceExpression_;
  if (!(ref instanceof java.lang.String)) {
    org.apache.hadoop.thirdparty.protobuf.ByteString bs =
        (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (bs.isValidUtf8()) {
      recurrenceExpression_ = s;
    }
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/** optional string recurrence_expression = 5 [default = "0"]; @return The bytes for recurrenceExpression. */
public org.apache.hadoop.thirdparty.protobuf.ByteString getRecurrenceExpressionBytes() {
  java.lang.Object ref = recurrenceExpression_;
  if (ref instanceof String) {
    org.apache.hadoop.thirdparty.protobuf.ByteString b =
        org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    recurrenceExpression_ = b;
    return b;
  } else {
    return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
  }
}
/** optional string recurrence_expression = 5 [default = "0"]; @param value The recurrenceExpression to set. @return This builder for chaining. */
public Builder setRecurrenceExpression(java.lang.String value) {
  if (value == null) { throw new NullPointerException(); }
  recurrenceExpression_ = value;
  bitField0_ |= 0x00000010;
  onChanged();
  return this;
}
/** optional string recurrence_expression = 5 [default = "0"]; @return This builder for chaining. */
public Builder clearRecurrenceExpression() {
  recurrenceExpression_ = getDefaultInstance().getRecurrenceExpression();
  bitField0_ = (bitField0_ & ~0x00000010);
  onChanged();
  return this;
}
/** optional string recurrence_expression = 5 [default = "0"]; @param value The bytes for recurrenceExpression to set. @return This builder for chaining. */
public Builder setRecurrenceExpressionBytes(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
  if (value == null) { throw new NullPointerException(); }
  recurrenceExpression_ = value;
  bitField0_ |= 0x00000010;
  onChanged();
  return this;
}

private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto,
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder,
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_;
/** optional .hadoop.yarn.PriorityProto priority = 6; @return Whether the priority field is set. */
public boolean hasPriority() {
  return ((bitField0_ & 0x00000020) != 0);
}
/** optional .hadoop.yarn.PriorityProto priority = 6; @return The priority. */
public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
  if (priorityBuilder_ == null) {
    return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
  } else {
    return priorityBuilder_.getMessage();
  }
}
/** optional .hadoop.yarn.PriorityProto priority = 6; */
public Builder setPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
  if (priorityBuilder_ == null) {
    if (value == null) { throw new NullPointerException(); }
    priority_ = value;
  } else {
    priorityBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000020;
  onChanged();
  return this;
}
/** optional .hadoop.yarn.PriorityProto priority = 6; */
public Builder setPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) {
  if (priorityBuilder_ == null) {
    priority_ = builderForValue.build();
  } else {
    priorityBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000020;
  onChanged();
  return this;
}
/** optional .hadoop.yarn.PriorityProto priority = 6; */
public Builder mergePriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
  if (priorityBuilder_ == null) {
    if (((bitField0_ & 0x00000020) != 0) &&
        priority_ != null &&
        priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) {
      getPriorityBuilder().mergeFrom(value);
    } else {
      priority_ = value;
    }
  } else {
    priorityBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000020;
  onChanged();
  return this;
}
/** optional .hadoop.yarn.PriorityProto priority = 6; */
public Builder clearPriority() {
  bitField0_ = (bitField0_ & ~0x00000020);
  priority_ = null;
  if (priorityBuilder_ != null) {
    priorityBuilder_.dispose();
    priorityBuilder_ = null;
  }
  onChanged();
  return this;
}
/** optional .hadoop.yarn.PriorityProto priority = 6; */
public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() {
  bitField0_ |= 0x00000020;
  onChanged();
  return getPriorityFieldBuilder().getBuilder();
}
/** optional .hadoop.yarn.PriorityProto priority = 6; */
public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
  if (priorityBuilder_ != null) {
    return priorityBuilder_.getMessageOrBuilder();
  } else {
    return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
  }
}
/** optional .hadoop.yarn.PriorityProto priority = 6; */
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto,
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder,
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> getPriorityFieldBuilder() {
  if (priorityBuilder_ == null) {
    priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto,
        org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder,
        org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>(
            getPriority(), getParentForChildren(), isClean());
    priority_ = null;
  }
  return priorityBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationDefinitionProto)
}

// @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationDefinitionProto)
private static final org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto();
}

public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

@java.lang.Deprecated
public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReservationDefinitionProto> PARSER =
    new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReservationDefinitionProto>() {
  @java.lang.Override
  public ReservationDefinitionProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};

public static org.apache.hadoop.thirdparty.protobuf.Parser<ReservationDefinitionProto> parser() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<ReservationDefinitionProto> getParserForType() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}
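// ---------------------------------------------------------------------------
// Editor's note: illustrative usage sketch, not part of the protoc output.
// Shows how the generated Builder above is typically driven; the reservation
// name and recurrence value below are hypothetical examples.
private static ReservationDefinitionProto buildExampleReservationDefinition() {
  return ReservationDefinitionProto.newBuilder()
      .setReservationName("nightly-report")   // hypothetical reservation name (field 4)
      .setRecurrenceExpression("86400000")    // hypothetical daily period in ms (field 5, default "0")
      .setPriority(PriorityProto.getDefaultInstance()) // field 6; real callers would set a priority
      .build();
}
// ---------------------------------------------------------------------------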
public interface ResourceAllocationRequestProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceAllocationRequestProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /** optional int64 start_time = 1; @return Whether the startTime field is set. */
  boolean hasStartTime();
  /** optional int64 start_time = 1; @return The startTime. */
  long getStartTime();
  /** optional int64 end_time = 2; @return Whether the endTime field is set. */
  boolean hasEndTime();
  /** optional int64 end_time = 2; @return The endTime. */
  long getEndTime();
  /** optional .hadoop.yarn.ResourceProto resource = 3; @return Whether the resource field is set. */
  boolean hasResource();
  /** optional .hadoop.yarn.ResourceProto resource = 3; @return The resource. */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource();
  /** optional .hadoop.yarn.ResourceProto resource = 3; */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder();
}

/**
 * Protobuf type {@code hadoop.yarn.ResourceAllocationRequestProto}
 */
public static final class ResourceAllocationRequestProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceAllocationRequestProto)
    ResourceAllocationRequestProtoOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ResourceAllocationRequestProto.newBuilder() to construct.
  private ResourceAllocationRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ResourceAllocationRequestProto() { }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ResourceAllocationRequestProto();
  }

  @java.lang.Override
  public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceAllocationRequestProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceAllocationRequestProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.class,
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder.class);
  }

  private int bitField0_;

  public static final int START_TIME_FIELD_NUMBER = 1;
  private long startTime_ = 0L;
  /** optional int64 start_time = 1; @return Whether the startTime field is set. */
  @java.lang.Override
  public boolean hasStartTime() { return ((bitField0_ & 0x00000001) != 0); }
  /** optional int64 start_time = 1; @return The startTime. */
  @java.lang.Override
  public long getStartTime() { return startTime_; }

  public static final int END_TIME_FIELD_NUMBER = 2;
  private long endTime_ = 0L;
  /** optional int64 end_time = 2; @return Whether the endTime field is set. */
  @java.lang.Override
  public boolean hasEndTime() { return ((bitField0_ & 0x00000002) != 0); }
  /** optional int64 end_time = 2; @return The endTime. */
  @java.lang.Override
  public long getEndTime() { return endTime_; }

  public static final int RESOURCE_FIELD_NUMBER = 3;
  private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
  /** optional .hadoop.yarn.ResourceProto resource = 3; @return Whether the resource field is set. */
  @java.lang.Override
  public boolean hasResource() { return ((bitField0_ & 0x00000004) != 0); }
  /** optional .hadoop.yarn.ResourceProto resource = 3; @return The resource. */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
    return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
  }
  /** optional .hadoop.yarn.ResourceProto resource = 3; */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
    return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    if (hasResource()) {
      if (!getResource().isInitialized()) { memoizedIsInitialized = 0; return false; }
    }
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) { output.writeInt64(1, startTime_); }
    if (((bitField0_ & 0x00000002) != 0)) { output.writeInt64(2, endTime_); }
    if (((bitField0_ & 0x00000004) != 0)) { output.writeMessage(3, getResource()); }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeInt64Size(1, startTime_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeInt64Size(2, endTime_);
    }
    if (((bitField0_ & 0x00000004) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeMessageSize(3, getResource());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) { return true; }
    if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto)) {
      return super.equals(obj);
    }
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto other =
        (org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto) obj;
    if (hasStartTime() != other.hasStartTime()) return false;
    if (hasStartTime()) { if (getStartTime() != other.getStartTime()) return false; }
    if (hasEndTime() != other.hasEndTime()) return false;
    if (hasEndTime()) { if (getEndTime() != other.getEndTime()) return false; }
    if (hasResource() != other.hasResource()) return false;
    if (hasResource()) { if (!getResource().equals(other.getResource())) return false; }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) { return memoizedHashCode; }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasStartTime()) {
      hash = (37 * hash) + START_TIME_FIELD_NUMBER;
      hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(getStartTime());
    }
    if (hasEndTime()) {
      hash = (37 * hash) + END_TIME_FIELD_NUMBER;
      hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(getEndTime());
    }
    if (hasResource()) {
      hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
      hash = (53 * hash) + getResource().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
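  // Editor's note (added commentary, not protoc output): getSerializedSize()
  // and hashCode() above memoize their results in memoizedSize and
  // memoizedHashCode; this is safe because generated messages are immutable
  // once built, so the values can never go stale.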
  public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(java.nio.ByteBuffer data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(
      java.nio.ByteBuffer data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(byte[] data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(
      byte[] data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Protobuf type {@code hadoop.yarn.ResourceAllocationRequestProto}
   */
  public static final class Builder extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceAllocationRequestProto)
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder {
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceAllocationRequestProto_descriptor;
    }
    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceAllocationRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.class,
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder.class);
    }

    // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.newBuilder()
    private Builder() { maybeForceBuilderInitialization(); }
    private Builder(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getResourceFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      startTime_ = 0L;
      endTime_ = 0L;
      resource_ = null;
      if (resourceBuilder_ != null) { resourceBuilder_.dispose(); resourceBuilder_ = null; }
      return this;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceAllocationRequestProto_descriptor;
    }
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto getDefaultInstanceForType() {
      return org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.getDefaultInstance();
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto build() {
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto result = buildPartial();
      if (!result.isInitialized()) { throw newUninitializedMessageException(result); }
      return result;
    }
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto buildPartial() {
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto result =
          new org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) { result.startTime_ = startTime_; to_bitField0_ |= 0x00000001; }
      if (((from_bitField0_ & 0x00000002) != 0)) { result.endTime_ = endTime_; to_bitField0_ |= 0x00000002; }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.resource_ = resourceBuilder_ == null ? resource_ : resourceBuilder_.build();
        to_bitField0_ |= 0x00000004;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override public Builder clone() { return super.clone(); }
    @java.lang.Override
    public Builder setField(org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto) {
        return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto other) {
      if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.getDefaultInstance()) return this;
      if (other.hasStartTime()) { setStartTime(other.getStartTime()); }
      if (other.hasEndTime()) { setEndTime(other.getEndTime()); }
      if (other.hasResource()) { mergeResource(other.getResource()); }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      if (hasResource()) {
        if (!getResource().isInitialized()) { return false; }
      }
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) { throw new java.lang.NullPointerException(); }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: {
              startTime_ = input.readInt64();
              bitField0_ |= 0x00000001;
              break;
            } // case 8
            case 16: {
              endTime_ = input.readInt64();
              bitField0_ |= 0x00000002;
              break;
            } // case 16
            case 26: {
              input.readMessage(getResourceFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000004;
              break;
            } // case 26
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private long startTime_;
    /** optional int64 start_time = 1; @return Whether the startTime field is set. */
    @java.lang.Override
    public boolean hasStartTime() { return ((bitField0_ & 0x00000001) != 0); }
    /** optional int64 start_time = 1; @return The startTime. */
    @java.lang.Override
    public long getStartTime() { return startTime_; }
    /** optional int64 start_time = 1; @param value The startTime to set. @return This builder for chaining. */
    public Builder setStartTime(long value) {
      startTime_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /** optional int64 start_time = 1; @return This builder for chaining. */
    public Builder clearStartTime() {
      bitField0_ = (bitField0_ & ~0x00000001);
      startTime_ = 0L;
      onChanged();
      return this;
    }

    private long endTime_;
    /** optional int64 end_time = 2; @return Whether the endTime field is set. */
    @java.lang.Override
    public boolean hasEndTime() { return ((bitField0_ & 0x00000002) != 0); }
    /** optional int64 end_time = 2; @return The endTime. */
    @java.lang.Override
    public long getEndTime() { return endTime_; }
    /** optional int64 end_time = 2; @param value The endTime to set. @return This builder for chaining. */
    public Builder setEndTime(long value) {
      endTime_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /** optional int64 end_time = 2; @return This builder for chaining. */
    public Builder clearEndTime() {
      bitField0_ = (bitField0_ & ~0x00000002);
      endTime_ = 0L;
      onChanged();
      return this;
    }

    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
    private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourceBuilder_;
    /** optional .hadoop.yarn.ResourceProto resource = 3; @return Whether the resource field is set. */
    public boolean hasResource() { return ((bitField0_ & 0x00000004) != 0); }
    /** optional .hadoop.yarn.ResourceProto resource = 3; @return The resource. */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
      if (resourceBuilder_ == null) {
        return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
      } else {
        return resourceBuilder_.getMessage();
      }
    }
    /** optional .hadoop.yarn.ResourceProto resource = 3; */
    public Builder setResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
      if (resourceBuilder_ == null) {
        if (value == null) { throw new NullPointerException(); }
        resource_ = value;
      } else {
        resourceBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto resource = 3; */
    public Builder setResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
      if (resourceBuilder_ == null) {
        resource_ = builderForValue.build();
      } else {
        resourceBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto resource = 3; */
    public Builder mergeResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
      if (resourceBuilder_ == null) {
        if (((bitField0_ & 0x00000004) != 0) &&
            resource_ != null &&
            resource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
          getResourceBuilder().mergeFrom(value);
        } else {
          resource_ = value;
        }
      } else {
        resourceBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto resource = 3; */
    public Builder clearResource() {
      bitField0_ = (bitField0_ & ~0x00000004);
      resource_ = null;
      if (resourceBuilder_ != null) { resourceBuilder_.dispose(); resourceBuilder_ = null; }
      onChanged();
      return this;
    }
    /** optional .hadoop.yarn.ResourceProto resource = 3; */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourceBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getResourceFieldBuilder().getBuilder();
    }
    /** optional .hadoop.yarn.ResourceProto resource = 3; */
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
      if (resourceBuilder_ != null) {
        return resourceBuilder_.getMessageOrBuilder();
      } else {
        return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
      }
    }
    /** optional .hadoop.yarn.ResourceProto resource = 3; */
    private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder,
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getResourceFieldBuilder() {
      if (resourceBuilder_ == null) {
        resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto,
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder,
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                getResource(), getParentForChildren(), isClean());
        resource_ = null;
      }
      return resourceBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceAllocationRequestProto)
  }

  // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceAllocationRequestProto)
  private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto();
  }

  public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  @java.lang.Deprecated
  public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceAllocationRequestProto> PARSER =
      new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceAllocationRequestProto>() {
    @java.lang.Override
    public ResourceAllocationRequestProto parsePartialFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceAllocationRequestProto> parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceAllocationRequestProto> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
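// ---------------------------------------------------------------------------
// Editor's note: illustrative usage sketch, not part of the protoc output.
// Builds a ResourceAllocationRequestProto with hypothetical times, serializes
// it, and parses it back through the generated parseFrom(byte[]) above.
private static ResourceAllocationRequestProto roundTripExample()
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  ResourceAllocationRequestProto request = ResourceAllocationRequestProto.newBuilder()
      .setStartTime(1700000000000L)   // hypothetical epoch millis (field 1)
      .setEndTime(1700003600000L)     // hypothetical epoch millis (field 2)
      .setResource(ResourceProto.getDefaultInstance()) // field 3; real callers set memory/vcores
      .build();
  byte[] wire = request.toByteArray(); // inherited from the protobuf runtime
  return ResourceAllocationRequestProto.parseFrom(wire);
}
// ---------------------------------------------------------------------------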
public interface ReservationAllocationStateProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationAllocationStateProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /** optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; @return Whether the reservationDefinition field is set. */
  boolean hasReservationDefinition();
  /** optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; @return The reservationDefinition. */
  org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition();
  /** optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */
  org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder();

  /** repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */
  java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto> getAllocationRequestsList();
  /** repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto getAllocationRequests(int index);
  /** repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */
  int getAllocationRequestsCount();
  /** repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */
  java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder> getAllocationRequestsOrBuilderList();
  /** repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder getAllocationRequestsOrBuilder(int index);

  /** optional int64 start_time = 3; @return Whether the startTime field is set. */
  boolean hasStartTime();
  /** optional int64 start_time = 3; @return The startTime. */
  long getStartTime();
  /** optional int64 end_time = 4; @return Whether the endTime field is set. */
  boolean hasEndTime();
  /** optional int64 end_time = 4; @return The endTime. */
  long getEndTime();

  /** optional string user = 5; @return Whether the user field is set. */
  boolean hasUser();
  /** optional string user = 5; @return The user. */
  java.lang.String getUser();
  /** optional string user = 5; @return The bytes for user. */
  org.apache.hadoop.thirdparty.protobuf.ByteString getUserBytes();

  /** optional bool contains_gangs = 6; @return Whether the containsGangs field is set. */
  boolean hasContainsGangs();
  /** optional bool contains_gangs = 6; @return The containsGangs. */
  boolean getContainsGangs();

  /** optional int64 acceptance_time = 7; @return Whether the acceptanceTime field is set. */
  boolean hasAcceptanceTime();
  /** optional int64 acceptance_time = 7; @return The acceptanceTime. */
  long getAcceptanceTime();

  /** optional .hadoop.yarn.ReservationIdProto reservation_id = 8; @return Whether the reservationId field is set. */
  boolean hasReservationId();
  /** optional .hadoop.yarn.ReservationIdProto reservation_id = 8; @return The reservationId. */
  org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId();
  /** optional .hadoop.yarn.ReservationIdProto reservation_id = 8; */
  org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder();
}

/**
 * Protobuf type {@code hadoop.yarn.ReservationAllocationStateProto}
 */
public static final class ReservationAllocationStateProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationAllocationStateProto)
    ReservationAllocationStateProtoOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ReservationAllocationStateProto.newBuilder() to construct.
  private ReservationAllocationStateProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ReservationAllocationStateProto() {
    allocationRequests_ = java.util.Collections.emptyList();
    user_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ReservationAllocationStateProto();
  }

  @java.lang.Override
  public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationAllocationStateProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationAllocationStateProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.class,
            org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder.class);
  }

  private int bitField0_;

  public static final int RESERVATION_DEFINITION_FIELD_NUMBER = 1;
  private org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto reservationDefinition_;
  /** optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; @return Whether the reservationDefinition field is set. */
  @java.lang.Override
  public boolean hasReservationDefinition() { return ((bitField0_ & 0x00000001) != 0); }
  /** optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; @return The reservationDefinition. */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition() {
    return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_;
  }
  /** optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder() {
    return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_;
  }

  public static final int ALLOCATION_REQUESTS_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto> allocationRequests_;
  /** repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */
  @java.lang.Override
  public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto> getAllocationRequestsList() {
    return allocationRequests_;
  }
  /** repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */
  @java.lang.Override
  public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder> getAllocationRequestsOrBuilderList() {
    return allocationRequests_;
  }
  /** repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */
  @java.lang.Override
  public int getAllocationRequestsCount() { return allocationRequests_.size(); }
  /** repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto getAllocationRequests(int index) {
    return allocationRequests_.get(index);
  }
  /** repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder getAllocationRequestsOrBuilder(int index) {
    return allocationRequests_.get(index);
  }

  public static final int START_TIME_FIELD_NUMBER = 3;
  private long startTime_ = 0L;
  /** optional int64 start_time = 3; @return Whether the startTime field is set. */
  @java.lang.Override
  public boolean hasStartTime() { return ((bitField0_ & 0x00000002) != 0); }
  /** optional int64 start_time = 3; @return The startTime. */
  @java.lang.Override
  public long getStartTime() { return startTime_; }

  public static final int END_TIME_FIELD_NUMBER = 4;
  private long endTime_ = 0L;
  /** optional int64 end_time = 4; @return Whether the endTime field is set. */
  @java.lang.Override
  public boolean hasEndTime() { return ((bitField0_ & 0x00000004) != 0); }
  /** optional int64 end_time = 4; @return The endTime. */
  @java.lang.Override
  public long getEndTime() { return endTime_; }

  public static final int USER_FIELD_NUMBER = 5;
  @SuppressWarnings("serial")
  private volatile java.lang.Object user_ = "";
  /** optional string user = 5; @return Whether the user field is set. */
  @java.lang.Override
  public boolean hasUser() { return ((bitField0_ & 0x00000008) != 0); }
  /** optional string user = 5; @return The user. */
  @java.lang.Override
  public java.lang.String getUser() {
    java.lang.Object ref = user_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      org.apache.hadoop.thirdparty.protobuf.ByteString bs =
          (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (bs.isValidUtf8()) {
        user_ = s;
      }
      return s;
    }
  }
  /** optional string user = 5; @return The bytes for user. */
  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.ByteString getUserBytes() {
    java.lang.Object ref = user_;
    if (ref instanceof java.lang.String) {
      org.apache.hadoop.thirdparty.protobuf.ByteString b =
          org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      user_ = b;
      return b;
    } else {
      return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
    }
  }

  public static final int CONTAINS_GANGS_FIELD_NUMBER = 6;
  private boolean containsGangs_ = false;
  /** optional bool contains_gangs = 6; @return Whether the containsGangs field is set. */
  @java.lang.Override
  public boolean hasContainsGangs() { return ((bitField0_ & 0x00000010) != 0); }
  /** optional bool contains_gangs = 6; @return The containsGangs. */
  @java.lang.Override
  public boolean getContainsGangs() { return containsGangs_; }

  public static final int ACCEPTANCE_TIME_FIELD_NUMBER = 7;
  private long acceptanceTime_ = 0L;
  /** optional int64 acceptance_time = 7; @return Whether the acceptanceTime field is set. */
  @java.lang.Override
  public boolean hasAcceptanceTime() { return ((bitField0_ & 0x00000020) != 0); }
  /** optional int64 acceptance_time = 7; @return The acceptanceTime. */
  @java.lang.Override
  public long getAcceptanceTime() { return acceptanceTime_; }

  public static final int RESERVATION_ID_FIELD_NUMBER = 8;
  private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
  /** optional .hadoop.yarn.ReservationIdProto reservation_id = 8; @return Whether the reservationId field is set. */
  @java.lang.Override
  public boolean hasReservationId() { return ((bitField0_ & 0x00000040) != 0); }
  /** optional .hadoop.yarn.ReservationIdProto reservation_id = 8; @return The reservationId. */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
    return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
  }
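  // Editor's note (added commentary, not protoc output): the bitField0_ masks
  // used by the accessors above are proto2 "has bits". For example, mask
  // 0x00000008 records whether optional field user (field number 5) was
  // explicitly set, so hasUser() can distinguish an unset field from one set
  // to the empty string. The repeated allocation_requests field needs no has
  // bit; emptiness of the list carries the same information.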
  /** optional .hadoop.yarn.ReservationIdProto reservation_id = 8; */
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
    return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    if (hasReservationDefinition()) {
      if (!getReservationDefinition().isInitialized()) { memoizedIsInitialized = 0; return false; }
    }
    for (int i = 0; i < getAllocationRequestsCount(); i++) {
      if (!getAllocationRequests(i).isInitialized()) { memoizedIsInitialized = 0; return false; }
    }
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getReservationDefinition()); }
    for (int i = 0; i < allocationRequests_.size(); i++) { output.writeMessage(2, allocationRequests_.get(i)); }
    if (((bitField0_ & 0x00000002) != 0)) { output.writeInt64(3, startTime_); }
    if (((bitField0_ & 0x00000004) != 0)) { output.writeInt64(4, endTime_); }
    if (((bitField0_ & 0x00000008) != 0)) {
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, user_);
    }
    if (((bitField0_ & 0x00000010) != 0)) { output.writeBool(6, containsGangs_); }
    if (((bitField0_ & 0x00000020) != 0)) { output.writeInt64(7, acceptanceTime_); }
    if (((bitField0_ & 0x00000040) != 0)) { output.writeMessage(8, getReservationId()); }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeMessageSize(1, getReservationDefinition());
    }
    for (int i = 0; i < allocationRequests_.size(); i++) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeMessageSize(2, allocationRequests_.get(i));
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeInt64Size(3, startTime_);
    }
    if (((bitField0_ & 0x00000004) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeInt64Size(4, endTime_);
    }
    if (((bitField0_ & 0x00000008) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, user_);
    }
    if (((bitField0_ & 0x00000010) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeBoolSize(6, containsGangs_);
    }
    if (((bitField0_ & 0x00000020) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeInt64Size(7, acceptanceTime_);
    }
    if (((bitField0_ & 0x00000040) != 0)) {
      size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream.computeMessageSize(8, getReservationId());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) { return true; }
    if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto)) {
      return super.equals(obj);
    }
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto other =
        (org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto) obj;
    if (hasReservationDefinition() != other.hasReservationDefinition()) return false;
    if (hasReservationDefinition()) {
      if (!getReservationDefinition().equals(other.getReservationDefinition())) return false;
    }
    if (!getAllocationRequestsList().equals(other.getAllocationRequestsList())) return false;
    if (hasStartTime() != other.hasStartTime()) return false;
    if (hasStartTime()) { if (getStartTime() != other.getStartTime()) return false; }
    if (hasEndTime() != other.hasEndTime()) return false;
    if (hasEndTime()) { if (getEndTime() != other.getEndTime()) return false; }
    if (hasUser() != other.hasUser()) return false;
    if (hasUser()) { if (!getUser().equals(other.getUser())) return false; }
    if (hasContainsGangs() != other.hasContainsGangs()) return false;
    if (hasContainsGangs()) { if (getContainsGangs() != other.getContainsGangs()) return false; }
    if (hasAcceptanceTime() != other.hasAcceptanceTime()) return false;
    if (hasAcceptanceTime()) { if (getAcceptanceTime() != other.getAcceptanceTime()) return false; }
    if (hasReservationId() != other.hasReservationId()) return false;
    if (hasReservationId()) { if (!getReservationId().equals(other.getReservationId())) return false; }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) { return memoizedHashCode; }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasReservationDefinition()) {
      hash = (37 * hash) + RESERVATION_DEFINITION_FIELD_NUMBER;
      hash = (53 * hash) + getReservationDefinition().hashCode();
    }
    if (getAllocationRequestsCount() > 0) {
      hash = (37 * hash) + ALLOCATION_REQUESTS_FIELD_NUMBER;
      hash = (53 * hash) + getAllocationRequestsList().hashCode();
    }
    if (hasStartTime()) {
      hash = (37 * hash) + START_TIME_FIELD_NUMBER;
      hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(getStartTime());
    }
    if (hasEndTime()) {
      hash = (37 * hash) + END_TIME_FIELD_NUMBER;
      hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(getEndTime());
    }
    if (hasUser()) {
      hash = (37 * hash) + USER_FIELD_NUMBER;
      hash = (53 * hash) + getUser().hashCode();
    }
    if (hasContainsGangs()) {
      hash = (37 * hash) + CONTAINS_GANGS_FIELD_NUMBER;
      hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(getContainsGangs());
    }
    if (hasAcceptanceTime()) {
      hash = (37 * hash) + ACCEPTANCE_TIME_FIELD_NUMBER;
      hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(getAcceptanceTime());
    }
    if (hasReservationId()) {
      hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER;
      hash = (53 * hash) + getReservationId().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(java.nio.ByteBuffer data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(
      java.nio.ByteBuffer data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.ByteString data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(byte[] data)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(
      byte[] data,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
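  // -------------------------------------------------------------------------
  // Editor's note: illustrative usage sketch, not part of the protoc output.
  // The parseDelimitedFrom methods generated above pair with
  // writeDelimitedTo(), inherited from the protobuf runtime, to stream several
  // length-prefixed messages over one InputStream; the stream contents here
  // are hypothetical.
  private static ReservationAllocationStateProto readOneDelimited(java.io.InputStream in)
      throws java.io.IOException {
    // Returns null at end of stream, otherwise the next length-prefixed message.
    return ReservationAllocationStateProto.parseDelimitedFrom(in);
  }
  // -------------------------------------------------------------------------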
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Protobuf type {@code hadoop.yarn.ReservationAllocationStateProto}
   */
  public static final class Builder extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationAllocationStateProto)
      org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder {
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationAllocationStateProto_descriptor;
    }
    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationAllocationStateProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.class,
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder.class);
    }

    // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.newBuilder()
    private Builder() { maybeForceBuilderInitialization(); }
    private Builder(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getReservationDefinitionFieldBuilder();
        getAllocationRequestsFieldBuilder();
        getReservationIdFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      reservationDefinition_ = null;
      if (reservationDefinitionBuilder_ != null) {
        reservationDefinitionBuilder_.dispose();
        reservationDefinitionBuilder_ = null;
      }
      if (allocationRequestsBuilder_ == null) {
        allocationRequests_ = java.util.Collections.emptyList();
      } else {
        allocationRequests_ = null;
        allocationRequestsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      startTime_ = 0L;
      endTime_ = 0L;
      user_ = "";
      containsGangs_ = false;
      acceptanceTime_ = 0L;
      reservationId_ = null;
      if (reservationIdBuilder_ != null) {
        reservationIdBuilder_.dispose();
        reservationIdBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationAllocationStateProto_descriptor;
    }
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto getDefaultInstanceForType() {
      return org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.getDefaultInstance();
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto build() {
      org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto result = buildPartial();
      if (!result.isInitialized()) { throw newUninitializedMessageException(result); }
      return result;
    }
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto buildPartial() {
      org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto result =
          new org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto result) {
      if (allocationRequestsBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)) {
          allocationRequests_ = java.util.Collections.unmodifiableList(allocationRequests_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.allocationRequests_ = allocationRequests_;
      } else {
        result.allocationRequests_ = allocationRequestsBuilder_.build();
      }
    }
    private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.reservationDefinition_ = reservationDefinitionBuilder_ == null
            ? reservationDefinition_ : reservationDefinitionBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) { result.startTime_ = startTime_; to_bitField0_ |= 0x00000002; }
      if (((from_bitField0_ & 0x00000008) != 0)) { result.endTime_ = endTime_; to_bitField0_ |= 0x00000004; }
      if (((from_bitField0_ & 0x00000010) != 0)) { result.user_ = user_; to_bitField0_ |= 0x00000008; }
      if (((from_bitField0_ & 0x00000020) != 0)) { result.containsGangs_ = containsGangs_; to_bitField0_ |= 0x00000010; }
      if (((from_bitField0_ & 0x00000040) != 0)) { result.acceptanceTime_ = acceptanceTime_; to_bitField0_ |= 0x00000020; }
      if (((from_bitField0_ & 0x00000080) != 0)) {
        result.reservationId_ = reservationIdBuilder_ == null
            ? reservationId_ : reservationIdBuilder_.build();
        to_bitField0_ |= 0x00000040;
      }
      result.bitField0_ |= to_bitField0_;
    }
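    // Editor's note (added commentary, not protoc output): buildPartial0 remaps
    // builder has-bits to message has-bits. They are offset by one position from
    // start_time onward because the repeated allocation_requests field consumes
    // builder bit 0x00000002 (to track list mutability) but, being repeated, has
    // no has-bit in the built message.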
reservationId_ : reservationIdBuilder_.build(); to_bitField0_ |= 0x00000040; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.getDefaultInstance()) return this; if (other.hasReservationDefinition()) { mergeReservationDefinition(other.getReservationDefinition()); } if (allocationRequestsBuilder_ == null) { if (!other.allocationRequests_.isEmpty()) { if (allocationRequests_.isEmpty()) { allocationRequests_ = other.allocationRequests_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureAllocationRequestsIsMutable(); allocationRequests_.addAll(other.allocationRequests_); } onChanged(); } } else { if (!other.allocationRequests_.isEmpty()) { if (allocationRequestsBuilder_.isEmpty()) { allocationRequestsBuilder_.dispose(); allocationRequestsBuilder_ = null; allocationRequests_ = other.allocationRequests_; bitField0_ = (bitField0_ & ~0x00000002); allocationRequestsBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getAllocationRequestsFieldBuilder() : null; } else { allocationRequestsBuilder_.addAllMessages(other.allocationRequests_); } } } if (other.hasStartTime()) { setStartTime(other.getStartTime()); } if (other.hasEndTime()) { setEndTime(other.getEndTime()); } if (other.hasUser()) { user_ = other.user_; bitField0_ |= 0x00000010; onChanged(); } if (other.hasContainsGangs()) { setContainsGangs(other.getContainsGangs()); } if (other.hasAcceptanceTime()) { setAcceptanceTime(other.getAcceptanceTime()); } if (other.hasReservationId()) { mergeReservationId(other.getReservationId()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasReservationDefinition()) { if (!getReservationDefinition().isInitialized()) { return false; } } for (int i = 0; i < getAllocationRequestsCount(); i++) { if (!getAllocationRequests(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getReservationDefinitionFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.PARSER, extensionRegistry); if (allocationRequestsBuilder_ == null) { ensureAllocationRequestsIsMutable(); allocationRequests_.add(m); } else { allocationRequestsBuilder_.addMessage(m); } break; } // case 18 case 24: { startTime_ = input.readInt64(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { endTime_ = input.readInt64(); bitField0_ |= 0x00000008; break; } // case 32 case 42: { user_ = input.readBytes(); bitField0_ |= 0x00000010; break; } // case 42 case 48: { containsGangs_ = input.readBool(); bitField0_ |= 0x00000020; break; } // case 48 case 56: { acceptanceTime_ = input.readInt64(); bitField0_ |= 0x00000040; break; } // case 56 case 66: { input.readMessage( getReservationIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000080; break; } // case 66 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto reservationDefinition_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder> reservationDefinitionBuilder_; /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; * @return Whether the reservationDefinition field is set. 
*/ public boolean hasReservationDefinition() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; * @return The reservationDefinition. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition() { if (reservationDefinitionBuilder_ == null) { return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_; } else { return reservationDefinitionBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ public Builder setReservationDefinition(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto value) { if (reservationDefinitionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } reservationDefinition_ = value; } else { reservationDefinitionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ public Builder setReservationDefinition( org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder builderForValue) { if (reservationDefinitionBuilder_ == null) { reservationDefinition_ = builderForValue.build(); } else { reservationDefinitionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ public Builder mergeReservationDefinition(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto value) { if (reservationDefinitionBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && reservationDefinition_ != null && reservationDefinition_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance()) { getReservationDefinitionBuilder().mergeFrom(value); } else { reservationDefinition_ = value; } } else { reservationDefinitionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ public Builder clearReservationDefinition() { bitField0_ = (bitField0_ & ~0x00000001); reservationDefinition_ = null; if (reservationDefinitionBuilder_ != null) { reservationDefinitionBuilder_.dispose(); reservationDefinitionBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder getReservationDefinitionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getReservationDefinitionFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder() { if (reservationDefinitionBuilder_ != null) { return reservationDefinitionBuilder_.getMessageOrBuilder(); } else { return reservationDefinition_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_; } } /** * optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder> getReservationDefinitionFieldBuilder() { if (reservationDefinitionBuilder_ == null) { reservationDefinitionBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder>( getReservationDefinition(), getParentForChildren(), isClean()); reservationDefinition_ = null; } return reservationDefinitionBuilder_; } private java.util.List allocationRequests_ = java.util.Collections.emptyList(); private void ensureAllocationRequestsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { allocationRequests_ = new java.util.ArrayList(allocationRequests_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder> allocationRequestsBuilder_; /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public java.util.List getAllocationRequestsList() { if (allocationRequestsBuilder_ == null) { return java.util.Collections.unmodifiableList(allocationRequests_); } else { return allocationRequestsBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public int getAllocationRequestsCount() { if (allocationRequestsBuilder_ == null) { return allocationRequests_.size(); } else { return allocationRequestsBuilder_.getCount(); } } /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto getAllocationRequests(int index) { if (allocationRequestsBuilder_ == null) { return allocationRequests_.get(index); } else { return allocationRequestsBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public Builder setAllocationRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto value) { if (allocationRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAllocationRequestsIsMutable(); allocationRequests_.set(index, value); onChanged(); } else { allocationRequestsBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public Builder setAllocationRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder builderForValue) { if (allocationRequestsBuilder_ == null) { ensureAllocationRequestsIsMutable(); allocationRequests_.set(index, builderForValue.build()); onChanged(); } else { 
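      /*
       * Editor's sketch (not part of the generated source): how a caller
       * typically populates the repeated allocation_requests field through
       * this Builder. The literal values are invented for illustration;
       * every method used is one of the generated accessors shown here.
       *
       *   YarnProtos.ReservationAllocationStateProto.Builder b =
       *       YarnProtos.ReservationAllocationStateProto.newBuilder()
       *           .setUser("alice")
       *           .setStartTime(1L)
       *           .setEndTime(2L);
       *   b.addAllocationRequests(
       *       YarnProtos.ResourceAllocationRequestProto.getDefaultInstance());
       *   // getAllocationRequestsList() returns an unmodifiable view;
       *   // mutate the list only through the Builder methods.
       *   int count = b.getAllocationRequestsCount();
       */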
allocationRequestsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public Builder addAllocationRequests(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto value) { if (allocationRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAllocationRequestsIsMutable(); allocationRequests_.add(value); onChanged(); } else { allocationRequestsBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public Builder addAllocationRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto value) { if (allocationRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAllocationRequestsIsMutable(); allocationRequests_.add(index, value); onChanged(); } else { allocationRequestsBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public Builder addAllocationRequests( org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder builderForValue) { if (allocationRequestsBuilder_ == null) { ensureAllocationRequestsIsMutable(); allocationRequests_.add(builderForValue.build()); onChanged(); } else { allocationRequestsBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public Builder addAllocationRequests( int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder builderForValue) { if (allocationRequestsBuilder_ == null) { ensureAllocationRequestsIsMutable(); allocationRequests_.add(index, builderForValue.build()); onChanged(); } else { allocationRequestsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public Builder addAllAllocationRequests( java.lang.Iterable values) { if (allocationRequestsBuilder_ == null) { ensureAllocationRequestsIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, allocationRequests_); onChanged(); } else { allocationRequestsBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public Builder clearAllocationRequests() { if (allocationRequestsBuilder_ == null) { allocationRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { allocationRequestsBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public Builder removeAllocationRequests(int index) { if (allocationRequestsBuilder_ == null) { ensureAllocationRequestsIsMutable(); allocationRequests_.remove(index); onChanged(); } else { allocationRequestsBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder getAllocationRequestsBuilder( int index) { return getAllocationRequestsFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder getAllocationRequestsOrBuilder( int index) { if (allocationRequestsBuilder_ == null) { return allocationRequests_.get(index); } else { return allocationRequestsBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public java.util.List getAllocationRequestsOrBuilderList() { if (allocationRequestsBuilder_ != null) { return allocationRequestsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(allocationRequests_); } } /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder addAllocationRequestsBuilder() { return getAllocationRequestsFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder addAllocationRequestsBuilder( int index) { return getAllocationRequestsFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2; */ public java.util.List getAllocationRequestsBuilderList() { return getAllocationRequestsFieldBuilder().getBuilderList(); } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder> getAllocationRequestsFieldBuilder() { if (allocationRequestsBuilder_ == null) { allocationRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder>( allocationRequests_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); allocationRequests_ = null; } return allocationRequestsBuilder_; } private long startTime_ ; /** * optional int64 start_time = 3; * @return Whether the startTime field is set. */ @java.lang.Override public boolean hasStartTime() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int64 start_time = 3; * @return The startTime. */ @java.lang.Override public long getStartTime() { return startTime_; } /** * optional int64 start_time = 3; * @param value The startTime to set. * @return This builder for chaining. */ public Builder setStartTime(long value) { startTime_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional int64 start_time = 3; * @return This builder for chaining. */ public Builder clearStartTime() { bitField0_ = (bitField0_ & ~0x00000004); startTime_ = 0L; onChanged(); return this; } private long endTime_ ; /** * optional int64 end_time = 4; * @return Whether the endTime field is set. */ @java.lang.Override public boolean hasEndTime() { return ((bitField0_ & 0x00000008) != 0); } /** * optional int64 end_time = 4; * @return The endTime. 
*/ @java.lang.Override public long getEndTime() { return endTime_; } /** * optional int64 end_time = 4; * @param value The endTime to set. * @return This builder for chaining. */ public Builder setEndTime(long value) { endTime_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional int64 end_time = 4; * @return This builder for chaining. */ public Builder clearEndTime() { bitField0_ = (bitField0_ & ~0x00000008); endTime_ = 0L; onChanged(); return this; } private java.lang.Object user_ = ""; /** * optional string user = 5; * @return Whether the user field is set. */ public boolean hasUser() { return ((bitField0_ & 0x00000010) != 0); } /** * optional string user = 5; * @return The user. */ public java.lang.String getUser() { java.lang.Object ref = user_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { user_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string user = 5; * @return The bytes for user. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getUserBytes() { java.lang.Object ref = user_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); user_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string user = 5; * @param value The user to set. * @return This builder for chaining. */ public Builder setUser( java.lang.String value) { if (value == null) { throw new NullPointerException(); } user_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional string user = 5; * @return This builder for chaining. */ public Builder clearUser() { user_ = getDefaultInstance().getUser(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); return this; } /** * optional string user = 5; * @param value The bytes for user to set. * @return This builder for chaining. */ public Builder setUserBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } user_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } private boolean containsGangs_ ; /** * optional bool contains_gangs = 6; * @return Whether the containsGangs field is set. */ @java.lang.Override public boolean hasContainsGangs() { return ((bitField0_ & 0x00000020) != 0); } /** * optional bool contains_gangs = 6; * @return The containsGangs. */ @java.lang.Override public boolean getContainsGangs() { return containsGangs_; } /** * optional bool contains_gangs = 6; * @param value The containsGangs to set. * @return This builder for chaining. */ public Builder setContainsGangs(boolean value) { containsGangs_ = value; bitField0_ |= 0x00000020; onChanged(); return this; } /** * optional bool contains_gangs = 6; * @return This builder for chaining. */ public Builder clearContainsGangs() { bitField0_ = (bitField0_ & ~0x00000020); containsGangs_ = false; onChanged(); return this; } private long acceptanceTime_ ; /** * optional int64 acceptance_time = 7; * @return Whether the acceptanceTime field is set. */ @java.lang.Override public boolean hasAcceptanceTime() { return ((bitField0_ & 0x00000040) != 0); } /** * optional int64 acceptance_time = 7; * @return The acceptanceTime. 
*/ @java.lang.Override public long getAcceptanceTime() { return acceptanceTime_; } /** * optional int64 acceptance_time = 7; * @param value The acceptanceTime to set. * @return This builder for chaining. */ public Builder setAcceptanceTime(long value) { acceptanceTime_ = value; bitField0_ |= 0x00000040; onChanged(); return this; } /** * optional int64 acceptance_time = 7; * @return This builder for chaining. */ public Builder clearAcceptanceTime() { bitField0_ = (bitField0_ & ~0x00000040); acceptanceTime_ = 0L; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> reservationIdBuilder_; /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 8; * @return Whether the reservationId field is set. */ public boolean hasReservationId() { return ((bitField0_ & 0x00000080) != 0); } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 8; * @return The reservationId. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() { if (reservationIdBuilder_ == null) { return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } else { return reservationIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 8; */ public Builder setReservationId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) { if (reservationIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } reservationId_ = value; } else { reservationIdBuilder_.setMessage(value); } bitField0_ |= 0x00000080; onChanged(); return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 8; */ public Builder setReservationId( org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder builderForValue) { if (reservationIdBuilder_ == null) { reservationId_ = builderForValue.build(); } else { reservationIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000080; onChanged(); return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 8; */ public Builder mergeReservationId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) { if (reservationIdBuilder_ == null) { if (((bitField0_ & 0x00000080) != 0) && reservationId_ != null && reservationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance()) { getReservationIdBuilder().mergeFrom(value); } else { reservationId_ = value; } } else { reservationIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000080; onChanged(); return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 8; */ public Builder clearReservationId() { bitField0_ = (bitField0_ & ~0x00000080); reservationId_ = null; if (reservationIdBuilder_ != null) { reservationIdBuilder_.dispose(); reservationIdBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 8; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder getReservationIdBuilder() { bitField0_ |= 0x00000080; onChanged(); return 
getReservationIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 8; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() { if (reservationIdBuilder_ != null) { return reservationIdBuilder_.getMessageOrBuilder(); } else { return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_; } } /** * optional .hadoop.yarn.ReservationIdProto reservation_id = 8; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> getReservationIdFieldBuilder() { if (reservationIdBuilder_ == null) { reservationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>( getReservationId(), getParentForChildren(), isClean()); reservationId_ = null; } return reservationIdBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationAllocationStateProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationAllocationStateProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ReservationAllocationStateProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } 
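    /*
     * Editor's sketch (not part of the generated source): a wire-format
     * round trip through the PARSER defined above. Field values are
     * invented; parseFrom(byte[]) is the corresponding overload generated
     * for this message and throws InvalidProtocolBufferException on
     * malformed input.
     *
     *   byte[] wire = YarnProtos.ReservationAllocationStateProto.newBuilder()
     *       .setUser("alice")
     *       .setAcceptanceTime(1234L)
     *       .build()
     *       .toByteArray();
     *   YarnProtos.ReservationAllocationStateProto parsed =
     *       YarnProtos.ReservationAllocationStateProto.parseFrom(wire);
     *   assert parsed.hasAcceptanceTime() && parsed.getAcceptanceTime() == 1234L;
     */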
    @java.lang.Override
    public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<ReservationAllocationStateProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface ContainerLaunchContextProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerLaunchContextProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto>
        getLocalResourcesList();
    /**
     * repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getLocalResources(int index);
    /**
     * repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;
     */
    int getLocalResourcesCount();
    /**
     * repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder>
        getLocalResourcesOrBuilderList();
    /**
     * repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder getLocalResourcesOrBuilder(
        int index);

    /**
     * optional bytes tokens = 2;
     * @return Whether the tokens field is set.
     */
    boolean hasTokens();
    /**
     * optional bytes tokens = 2;
     * @return The tokens.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString getTokens();

    /**
     * repeated .hadoop.yarn.StringBytesMapProto service_data = 3;
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto>
        getServiceDataList();
    /**
     * repeated .hadoop.yarn.StringBytesMapProto service_data = 3;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServiceData(int index);
    /**
     * repeated .hadoop.yarn.StringBytesMapProto service_data = 3;
     */
    int getServiceDataCount();
    /**
     * repeated .hadoop.yarn.StringBytesMapProto service_data = 3;
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder>
        getServiceDataOrBuilderList();
    /**
     * repeated .hadoop.yarn.StringBytesMapProto service_data = 3;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServiceDataOrBuilder(
        int index);

    /**
     * repeated .hadoop.yarn.StringStringMapProto environment = 4;
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>
        getEnvironmentList();
    /**
     * repeated .hadoop.yarn.StringStringMapProto environment = 4;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getEnvironment(int index);
    /**
     * repeated .hadoop.yarn.StringStringMapProto environment = 4;
     */
    int getEnvironmentCount();
    /**
     * repeated .hadoop.yarn.StringStringMapProto environment = 4;
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
        getEnvironmentOrBuilderList();
    /**
     * repeated .hadoop.yarn.StringStringMapProto environment = 4;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getEnvironmentOrBuilder(
        int index);

    /**
     * repeated string command = 5;
     * @return A list containing the command.
     */
    java.util.List<java.lang.String>
        getCommandList();
    /**
     * repeated string command = 5;
     * @return The count of command.
     */
    int getCommandCount();
    /**
     * repeated string command = 5;
     * @param index The index of the element to return.
     * @return The command at the given index.
     */
    java.lang.String getCommand(int index);
    /**
     * repeated string command = 5;
     * @param index The index of the value to return.
     * @return The bytes of the command at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getCommandBytes(int index);

    /**
     * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto>
        getApplicationACLsList();
    /**
     * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getApplicationACLs(int index);
    /**
     * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;
     */
    int getApplicationACLsCount();
    /**
     * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder>
        getApplicationACLsOrBuilderList();
    /**
     * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder getApplicationACLsOrBuilder(
        int index);

    /**
     * optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;
     * @return Whether the containerRetryContext field is set.
     */
    boolean hasContainerRetryContext();
    /**
     * optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;
     * @return The containerRetryContext.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto getContainerRetryContext();
    /**
     * optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProtoOrBuilder getContainerRetryContextOrBuilder();

    /**
     * optional bytes tokens_conf = 8;
     * @return Whether the tokensConf field is set.
     */
    boolean hasTokensConf();
    /**
     * optional bytes tokens_conf = 8;
     * @return The tokensConf.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString getTokensConf();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ContainerLaunchContextProto}
   */
  public static final class ContainerLaunchContextProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerLaunchContextProto)
      ContainerLaunchContextProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ContainerLaunchContextProto.newBuilder() to construct.
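    /*
     * Editor's sketch (not part of the generated source): a minimal
     * ContainerLaunchContextProto carrying only a launch command. The
     * command strings are invented; addCommand is the generated adder for
     * the repeated string command = 5 field.
     *
     *   YarnProtos.ContainerLaunchContextProto ctx =
     *       YarnProtos.ContainerLaunchContextProto.newBuilder()
     *           .addCommand("java")
     *           .addCommand("-Xmx256m")
     *           .addCommand("MyAppMaster")
     *           .build();
     *   // Every field of this message is optional or repeated, so
     *   // isInitialized() is trivially true.
     */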
private ContainerLaunchContextProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ContainerLaunchContextProto() { localResources_ = java.util.Collections.emptyList(); tokens_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY; serviceData_ = java.util.Collections.emptyList(); environment_ = java.util.Collections.emptyList(); command_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY; applicationACLs_ = java.util.Collections.emptyList(); tokensConf_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ContainerLaunchContextProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerLaunchContextProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerLaunchContextProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder.class); } private int bitField0_; public static final int LOCALRESOURCES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List localResources_; /** * repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */ @java.lang.Override public java.util.List getLocalResourcesList() { return localResources_; } /** * repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */ @java.lang.Override public java.util.List getLocalResourcesOrBuilderList() { return localResources_; } /** * repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */ @java.lang.Override public int getLocalResourcesCount() { return localResources_.size(); } /** * repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getLocalResources(int index) { return localResources_.get(index); } /** * repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder getLocalResourcesOrBuilder( int index) { return localResources_.get(index); } public static final int TOKENS_FIELD_NUMBER = 2; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString tokens_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY; /** * optional bytes tokens = 2; * @return Whether the tokens field is set. */ @java.lang.Override public boolean hasTokens() { return ((bitField0_ & 0x00000001) != 0); } /** * optional bytes tokens = 2; * @return The tokens. 
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getTokens() { return tokens_; } public static final int SERVICE_DATA_FIELD_NUMBER = 3; @SuppressWarnings("serial") private java.util.List serviceData_; /** * repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */ @java.lang.Override public java.util.List getServiceDataList() { return serviceData_; } /** * repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */ @java.lang.Override public java.util.List getServiceDataOrBuilderList() { return serviceData_; } /** * repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */ @java.lang.Override public int getServiceDataCount() { return serviceData_.size(); } /** * repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServiceData(int index) { return serviceData_.get(index); } /** * repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServiceDataOrBuilder( int index) { return serviceData_.get(index); } public static final int ENVIRONMENT_FIELD_NUMBER = 4; @SuppressWarnings("serial") private java.util.List environment_; /** * repeated .hadoop.yarn.StringStringMapProto environment = 4; */ @java.lang.Override public java.util.List getEnvironmentList() { return environment_; } /** * repeated .hadoop.yarn.StringStringMapProto environment = 4; */ @java.lang.Override public java.util.List getEnvironmentOrBuilderList() { return environment_; } /** * repeated .hadoop.yarn.StringStringMapProto environment = 4; */ @java.lang.Override public int getEnvironmentCount() { return environment_.size(); } /** * repeated .hadoop.yarn.StringStringMapProto environment = 4; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getEnvironment(int index) { return environment_.get(index); } /** * repeated .hadoop.yarn.StringStringMapProto environment = 4; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getEnvironmentOrBuilder( int index) { return environment_.get(index); } public static final int COMMAND_FIELD_NUMBER = 5; @SuppressWarnings("serial") private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList command_; /** * repeated string command = 5; * @return A list containing the command. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getCommandList() { return command_; } /** * repeated string command = 5; * @return The count of command. */ public int getCommandCount() { return command_.size(); } /** * repeated string command = 5; * @param index The index of the element to return. * @return The command at the given index. */ public java.lang.String getCommand(int index) { return command_.get(index); } /** * repeated string command = 5; * @param index The index of the value to return. * @return The bytes of the command at the given index. 
*/ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getCommandBytes(int index) { return command_.getByteString(index); } public static final int APPLICATION_ACLS_FIELD_NUMBER = 6; @SuppressWarnings("serial") private java.util.List applicationACLs_; /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */ @java.lang.Override public java.util.List getApplicationACLsList() { return applicationACLs_; } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */ @java.lang.Override public java.util.List getApplicationACLsOrBuilderList() { return applicationACLs_; } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */ @java.lang.Override public int getApplicationACLsCount() { return applicationACLs_.size(); } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getApplicationACLs(int index) { return applicationACLs_.get(index); } /** * repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder getApplicationACLsOrBuilder( int index) { return applicationACLs_.get(index); } public static final int CONTAINER_RETRY_CONTEXT_FIELD_NUMBER = 7; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto containerRetryContext_; /** * optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7; * @return Whether the containerRetryContext field is set. */ @java.lang.Override public boolean hasContainerRetryContext() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7; * @return The containerRetryContext. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto getContainerRetryContext() { return containerRetryContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.getDefaultInstance() : containerRetryContext_; } /** * optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProtoOrBuilder getContainerRetryContextOrBuilder() { return containerRetryContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.getDefaultInstance() : containerRetryContext_; } public static final int TOKENS_CONF_FIELD_NUMBER = 8; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString tokensConf_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY; /** * optional bytes tokens_conf = 8; * @return Whether the tokensConf field is set. */ @java.lang.Override public boolean hasTokensConf() { return ((bitField0_ & 0x00000004) != 0); } /** * optional bytes tokens_conf = 8; * @return The tokensConf. 
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getTokensConf() { return tokensConf_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < localResources_.size(); i++) { output.writeMessage(1, localResources_.get(i)); } if (((bitField0_ & 0x00000001) != 0)) { output.writeBytes(2, tokens_); } for (int i = 0; i < serviceData_.size(); i++) { output.writeMessage(3, serviceData_.get(i)); } for (int i = 0; i < environment_.size(); i++) { output.writeMessage(4, environment_.get(i)); } for (int i = 0; i < command_.size(); i++) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, command_.getRaw(i)); } for (int i = 0; i < applicationACLs_.size(); i++) { output.writeMessage(6, applicationACLs_.get(i)); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(7, getContainerRetryContext()); } if (((bitField0_ & 0x00000004) != 0)) { output.writeBytes(8, tokensConf_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < localResources_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, localResources_.get(i)); } if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBytesSize(2, tokens_); } for (int i = 0; i < serviceData_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(3, serviceData_.get(i)); } for (int i = 0; i < environment_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(4, environment_.get(i)); } { int dataSize = 0; for (int i = 0; i < command_.size(); i++) { dataSize += computeStringSizeNoTag(command_.getRaw(i)); } size += dataSize; size += 1 * getCommandList().size(); } for (int i = 0; i < applicationACLs_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(6, applicationACLs_.get(i)); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(7, getContainerRetryContext()); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBytesSize(8, tokensConf_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto) obj; if (!getLocalResourcesList() .equals(other.getLocalResourcesList())) return false; if (hasTokens() != other.hasTokens()) return false; if (hasTokens()) { if (!getTokens() .equals(other.getTokens())) return false; } if (!getServiceDataList() .equals(other.getServiceDataList())) return false; if 
(!getEnvironmentList() .equals(other.getEnvironmentList())) return false; if (!getCommandList() .equals(other.getCommandList())) return false; if (!getApplicationACLsList() .equals(other.getApplicationACLsList())) return false; if (hasContainerRetryContext() != other.hasContainerRetryContext()) return false; if (hasContainerRetryContext()) { if (!getContainerRetryContext() .equals(other.getContainerRetryContext())) return false; } if (hasTokensConf() != other.hasTokensConf()) return false; if (hasTokensConf()) { if (!getTokensConf() .equals(other.getTokensConf())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getLocalResourcesCount() > 0) { hash = (37 * hash) + LOCALRESOURCES_FIELD_NUMBER; hash = (53 * hash) + getLocalResourcesList().hashCode(); } if (hasTokens()) { hash = (37 * hash) + TOKENS_FIELD_NUMBER; hash = (53 * hash) + getTokens().hashCode(); } if (getServiceDataCount() > 0) { hash = (37 * hash) + SERVICE_DATA_FIELD_NUMBER; hash = (53 * hash) + getServiceDataList().hashCode(); } if (getEnvironmentCount() > 0) { hash = (37 * hash) + ENVIRONMENT_FIELD_NUMBER; hash = (53 * hash) + getEnvironmentList().hashCode(); } if (getCommandCount() > 0) { hash = (37 * hash) + COMMAND_FIELD_NUMBER; hash = (53 * hash) + getCommandList().hashCode(); } if (getApplicationACLsCount() > 0) { hash = (37 * hash) + APPLICATION_ACLS_FIELD_NUMBER; hash = (53 * hash) + getApplicationACLsList().hashCode(); } if (hasContainerRetryContext()) { hash = (37 * hash) + CONTAINER_RETRY_CONTEXT_FIELD_NUMBER; hash = (53 * hash) + getContainerRetryContext().hashCode(); } if (hasTokensConf()) { hash = (37 * hash) + TOKENS_CONF_FIELD_NUMBER; hash = (53 * hash) + getTokensConf().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
/**
 * Protobuf type {@code hadoop.yarn.ContainerLaunchContextProto}
 */
public static final class Builder extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerLaunchContextProto)
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder {
  public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerLaunchContextProto_descriptor;
  }
  @java.lang.Override
  protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerLaunchContextProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.class,
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder.class);
  }
  // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }
  private Builder(
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getLocalResourcesFieldBuilder();
      getServiceDataFieldBuilder();
      getEnvironmentFieldBuilder();
      getApplicationACLsFieldBuilder();
      getContainerRetryContextFieldBuilder();
    }
  }
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    if (localResourcesBuilder_ == null) {
      localResources_ = java.util.Collections.emptyList();
    } else {
      localResources_ = null;
      localResourcesBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    tokens_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
    if (serviceDataBuilder_ == null) {
      serviceData_ = java.util.Collections.emptyList();
    } else {
      serviceData_ = null;
      serviceDataBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000004);
    if (environmentBuilder_ == null) {
      environment_ = java.util.Collections.emptyList();
    } else {
      environment_ = null;
      environmentBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000008);
    command_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
    bitField0_ = (bitField0_ & ~0x00000010);
    if (applicationACLsBuilder_ == null) {
      applicationACLs_ = java.util.Collections.emptyList();
    } else {
      applicationACLs_ = null;
      applicationACLsBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000020);
    containerRetryContext_ = null;
    if (containerRetryContextBuilder_ != null) {
      containerRetryContextBuilder_.dispose();
      containerRetryContextBuilder_ = null;
    }
    tokensConf_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
    return this;
  }
  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerLaunchContextProto_descriptor;
  }
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getDefaultInstanceForType() {
    return org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance();
  }
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto build() {
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  @java.lang.Override
  public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto buildPartial() {
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto result =
        new org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto(this);
    buildPartialRepeatedFields(result);
    if (bitField0_ != 0) { buildPartial0(result); }
    onBuilt();
    return result;
  }
  private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto result) {
    if (localResourcesBuilder_ == null) {
      if (((bitField0_ & 0x00000001) != 0)) {
        localResources_ = java.util.Collections.unmodifiableList(localResources_);
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.localResources_ = localResources_;
    } else {
      result.localResources_ = localResourcesBuilder_.build();
    }
    if (serviceDataBuilder_ == null) {
      if (((bitField0_ & 0x00000004) != 0)) {
        serviceData_ = java.util.Collections.unmodifiableList(serviceData_);
        bitField0_ = (bitField0_ & ~0x00000004);
      }
      result.serviceData_ = serviceData_;
    } else {
      result.serviceData_ = serviceDataBuilder_.build();
    }
    if (environmentBuilder_ == null) {
      if (((bitField0_ & 0x00000008) != 0)) {
        environment_ = java.util.Collections.unmodifiableList(environment_);
        bitField0_ = (bitField0_ & ~0x00000008);
      }
      result.environment_ = environment_;
    } else {
      result.environment_ = environmentBuilder_.build();
    }
    if (((bitField0_ & 0x00000010) != 0)) {
      command_ = command_.getUnmodifiableView();
      bitField0_ = (bitField0_ & ~0x00000010);
    }
    result.command_ = command_;
    if (applicationACLsBuilder_ == null) {
      if (((bitField0_ & 0x00000020) != 0)) {
        applicationACLs_ = java.util.Collections.unmodifiableList(applicationACLs_);
        bitField0_ = (bitField0_ & ~0x00000020);
      }
      result.applicationACLs_ = applicationACLs_;
    } else {
      result.applicationACLs_ = applicationACLsBuilder_.build();
    }
  }
  private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto result) {
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.tokens_ = tokens_;
      to_bitField0_ |= 0x00000001;
    }
    if (((from_bitField0_ & 0x00000040) != 0)) {
      result.containerRetryContext_ = containerRetryContextBuilder_ == null
          ? containerRetryContext_ : containerRetryContextBuilder_.build();
      to_bitField0_ |= 0x00000002;
    }
    if (((from_bitField0_ & 0x00000080) != 0)) {
      result.tokensConf_ = tokensConf_;
      to_bitField0_ |= 0x00000004;
    }
    result.bitField0_ |= to_bitField0_;
  }
  @java.lang.Override
  public Builder clone() { return super.clone(); }
  @java.lang.Override
  public Builder setField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
    if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto) {
      return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto other) {
    if (other == org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance()) return this;
    if (localResourcesBuilder_ == null) {
      if (!other.localResources_.isEmpty()) {
        if (localResources_.isEmpty()) {
          localResources_ = other.localResources_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureLocalResourcesIsMutable();
          localResources_.addAll(other.localResources_);
        }
        onChanged();
      }
    } else {
      if (!other.localResources_.isEmpty()) {
        if (localResourcesBuilder_.isEmpty()) {
          localResourcesBuilder_.dispose();
          localResourcesBuilder_ = null;
          localResources_ = other.localResources_;
          bitField0_ = (bitField0_ & ~0x00000001);
          localResourcesBuilder_ =
              org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                  ? getLocalResourcesFieldBuilder() : null;
        } else {
          localResourcesBuilder_.addAllMessages(other.localResources_);
        }
      }
    }
    if (other.hasTokens()) {
      setTokens(other.getTokens());
    }
    if (serviceDataBuilder_ == null) {
      if (!other.serviceData_.isEmpty()) {
        if (serviceData_.isEmpty()) {
          serviceData_ = other.serviceData_;
          bitField0_ = (bitField0_ & ~0x00000004);
        } else {
          ensureServiceDataIsMutable();
          serviceData_.addAll(other.serviceData_);
        }
        onChanged();
      }
    } else {
      if (!other.serviceData_.isEmpty()) {
        if (serviceDataBuilder_.isEmpty()) {
          serviceDataBuilder_.dispose();
          serviceDataBuilder_ = null;
          serviceData_ = other.serviceData_;
          bitField0_ = (bitField0_ & ~0x00000004);
          serviceDataBuilder_ =
              org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                  ? getServiceDataFieldBuilder() : null;
        } else {
          serviceDataBuilder_.addAllMessages(other.serviceData_);
        }
      }
    }
    if (environmentBuilder_ == null) {
      if (!other.environment_.isEmpty()) {
        if (environment_.isEmpty()) {
          environment_ = other.environment_;
          bitField0_ = (bitField0_ & ~0x00000008);
        } else {
          ensureEnvironmentIsMutable();
          environment_.addAll(other.environment_);
        }
        onChanged();
      }
    } else {
      if (!other.environment_.isEmpty()) {
        if (environmentBuilder_.isEmpty()) {
          environmentBuilder_.dispose();
          environmentBuilder_ = null;
          environment_ = other.environment_;
          bitField0_ = (bitField0_ & ~0x00000008);
          environmentBuilder_ =
              org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                  ? getEnvironmentFieldBuilder() : null;
        } else {
          environmentBuilder_.addAllMessages(other.environment_);
        }
      }
    }
    if (!other.command_.isEmpty()) {
      if (command_.isEmpty()) {
        command_ = other.command_;
        bitField0_ = (bitField0_ & ~0x00000010);
      } else {
        ensureCommandIsMutable();
        command_.addAll(other.command_);
      }
      onChanged();
    }
    if (applicationACLsBuilder_ == null) {
      if (!other.applicationACLs_.isEmpty()) {
        if (applicationACLs_.isEmpty()) {
          applicationACLs_ = other.applicationACLs_;
          bitField0_ = (bitField0_ & ~0x00000020);
        } else {
          ensureApplicationACLsIsMutable();
          applicationACLs_.addAll(other.applicationACLs_);
        }
        onChanged();
      }
    } else {
      if (!other.applicationACLs_.isEmpty()) {
        if (applicationACLsBuilder_.isEmpty()) {
          applicationACLsBuilder_.dispose();
          applicationACLsBuilder_ = null;
          applicationACLs_ = other.applicationACLs_;
          bitField0_ = (bitField0_ & ~0x00000020);
          applicationACLsBuilder_ =
              org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                  ? getApplicationACLsFieldBuilder() : null;
        } else {
          applicationACLsBuilder_.addAllMessages(other.applicationACLs_);
        }
      }
    }
    if (other.hasContainerRetryContext()) {
      mergeContainerRetryContext(other.getContainerRetryContext());
    }
    if (other.hasTokensConf()) {
      setTokensConf(other.getTokensConf());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }
  @java.lang.Override
  public final boolean isInitialized() { return true; }
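  // Sketch of the merge semantics implemented above (example values are made up,
  // not from the generated source): mergeFrom(other) appends other's repeated
  // fields and overwrites singular fields that other has set.
  //
  //   YarnProtos.ContainerLaunchContextProto a =
  //       YarnProtos.ContainerLaunchContextProto.newBuilder().addCommand("a").build();
  //   YarnProtos.ContainerLaunchContextProto b =
  //       YarnProtos.ContainerLaunchContextProto.newBuilder().addCommand("b").build();
  //   YarnProtos.ContainerLaunchContextProto merged = a.toBuilder().mergeFrom(b).build();
  //   // merged.getCommandList() is ["a", "b"]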
  @java.lang.Override
  public Builder mergeFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: {
            org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto m =
                input.readMessage(
                    org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.PARSER,
                    extensionRegistry);
            if (localResourcesBuilder_ == null) {
              ensureLocalResourcesIsMutable();
              localResources_.add(m);
            } else {
              localResourcesBuilder_.addMessage(m);
            }
            break;
          } // case 10
          case 18: {
            tokens_ = input.readBytes();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
          case 26: {
            org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto m =
                input.readMessage(
                    org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.PARSER,
                    extensionRegistry);
            if (serviceDataBuilder_ == null) {
              ensureServiceDataIsMutable();
              serviceData_.add(m);
            } else {
              serviceDataBuilder_.addMessage(m);
            }
            break;
          } // case 26
          case 34: {
            org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto m =
                input.readMessage(
                    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.PARSER,
                    extensionRegistry);
            if (environmentBuilder_ == null) {
              ensureEnvironmentIsMutable();
              environment_.add(m);
            } else {
              environmentBuilder_.addMessage(m);
            }
            break;
          } // case 34
          case 42: {
            org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
            ensureCommandIsMutable();
            command_.add(bs);
            break;
          } // case 42
          case 50: {
            org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto m =
                input.readMessage(
                    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.PARSER,
                    extensionRegistry);
            if (applicationACLsBuilder_ == null) {
              ensureApplicationACLsIsMutable();
              applicationACLs_.add(m);
            } else {
              applicationACLsBuilder_.addMessage(m);
            }
            break;
          } // case 50
          case 58: {
            input.readMessage(
                getContainerRetryContextFieldBuilder().getBuilder(),
                extensionRegistry);
            bitField0_ |= 0x00000040;
            break;
          } // case 58
          case 66: {
            tokensConf_ = input.readBytes();
            bitField0_ |= 0x00000080;
            break;
          } // case 66
          default: {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
        } // switch (tag)
      } // while (!done)
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }
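  // Note on the case labels in the parse loop above: a protobuf tag is
  // (field_number << 3) | wire_type, and every field of this message is
  // length-delimited (wire type 2), so:
  //   field 1 localResources : (1 << 3) | 2 = 10
  //   field 2 tokens         : (2 << 3) | 2 = 18
  //   field 3 service_data   : (3 << 3) | 2 = 26
  //   field 4 environment    : (4 << 3) | 2 = 34
  //   field 5 command        : (5 << 3) | 2 = 42
  //   field 6 application_ACLs        : (6 << 3) | 2 = 50
  //   field 7 container_retry_context : (7 << 3) | 2 = 58
  //   field 8 tokens_conf             : (8 << 3) | 2 = 66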
  private int bitField0_;

  private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto> localResources_ =
      java.util.Collections.emptyList();
  private void ensureLocalResourcesIsMutable() {
    if (!((bitField0_ & 0x00000001) != 0)) {
      localResources_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto>(localResources_);
      bitField0_ |= 0x00000001;
    }
  }
  private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
      org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto,
      org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder,
      org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder> localResourcesBuilder_;

  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto> getLocalResourcesList() {
    if (localResourcesBuilder_ == null) {
      return java.util.Collections.unmodifiableList(localResources_);
    } else {
      return localResourcesBuilder_.getMessageList();
    }
  }
  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public int getLocalResourcesCount() {
    if (localResourcesBuilder_ == null) {
      return localResources_.size();
    } else {
      return localResourcesBuilder_.getCount();
    }
  }
  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getLocalResources(int index) {
    if (localResourcesBuilder_ == null) {
      return localResources_.get(index);
    } else {
      return localResourcesBuilder_.getMessage(index);
    }
  }
  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public Builder setLocalResources(
      int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto value) {
    if (localResourcesBuilder_ == null) {
      if (value == null) { throw new NullPointerException(); }
      ensureLocalResourcesIsMutable();
      localResources_.set(index, value);
      onChanged();
    } else {
      localResourcesBuilder_.setMessage(index, value);
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public Builder setLocalResources(
      int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder builderForValue) {
    if (localResourcesBuilder_ == null) {
      ensureLocalResourcesIsMutable();
      localResources_.set(index, builderForValue.build());
      onChanged();
    } else {
      localResourcesBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public Builder addLocalResources(org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto value) {
    if (localResourcesBuilder_ == null) {
      if (value == null) { throw new NullPointerException(); }
      ensureLocalResourcesIsMutable();
      localResources_.add(value);
      onChanged();
    } else {
      localResourcesBuilder_.addMessage(value);
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public Builder addLocalResources(
      int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto value) {
    if (localResourcesBuilder_ == null) {
      if (value == null) { throw new NullPointerException(); }
      ensureLocalResourcesIsMutable();
      localResources_.add(index, value);
      onChanged();
    } else {
      localResourcesBuilder_.addMessage(index, value);
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public Builder addLocalResources(
      org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder builderForValue) {
    if (localResourcesBuilder_ == null) {
      ensureLocalResourcesIsMutable();
      localResources_.add(builderForValue.build());
      onChanged();
    } else {
      localResourcesBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public Builder addLocalResources(
      int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder builderForValue) {
    if (localResourcesBuilder_ == null) {
      ensureLocalResourcesIsMutable();
      localResources_.add(index, builderForValue.build());
      onChanged();
    } else {
      localResourcesBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public Builder addAllLocalResources(
      java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto> values) {
    if (localResourcesBuilder_ == null) {
      ensureLocalResourcesIsMutable();
      org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
          values, localResources_);
      onChanged();
    } else {
      localResourcesBuilder_.addAllMessages(values);
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public Builder clearLocalResources() {
    if (localResourcesBuilder_ == null) {
      localResources_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
    } else {
      localResourcesBuilder_.clear();
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public Builder removeLocalResources(int index) {
    if (localResourcesBuilder_ == null) {
      ensureLocalResourcesIsMutable();
      localResources_.remove(index);
      onChanged();
    } else {
      localResourcesBuilder_.remove(index);
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder getLocalResourcesBuilder(int index) {
    return getLocalResourcesFieldBuilder().getBuilder(index);
  }
  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder getLocalResourcesOrBuilder(int index) {
    if (localResourcesBuilder_ == null) {
      return localResources_.get(index);
    } else {
      return localResourcesBuilder_.getMessageOrBuilder(index);
    }
  }
  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder> getLocalResourcesOrBuilderList() {
    if (localResourcesBuilder_ != null) {
      return localResourcesBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(localResources_);
    }
  }
  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder addLocalResourcesBuilder() {
    return getLocalResourcesFieldBuilder().addBuilder(
        org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.getDefaultInstance());
  }
  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder addLocalResourcesBuilder(int index) {
    return getLocalResourcesFieldBuilder().addBuilder(
        index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.getDefaultInstance());
  }
  /** repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1; */
  public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder> getLocalResourcesBuilderList() {
    return getLocalResourcesFieldBuilder().getBuilderList();
  }
  private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
      org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto,
      org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder,
      org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder>
      getLocalResourcesFieldBuilder() {
    if (localResourcesBuilder_ == null) {
      localResourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto,
          org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder,
          org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder>(
              localResources_,
              ((bitField0_ & 0x00000001) != 0),
              getParentForChildren(),
              isClean());
      localResources_ = null;
    }
    return localResourcesBuilder_;
  }
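  // Illustrative sketch of the repeated-field builder pattern above (not part of
  // the generated source; no particular field values are assumed):
  //
  //   YarnProtos.ContainerLaunchContextProto.Builder b =
  //       YarnProtos.ContainerLaunchContextProto.newBuilder();
  //   b.addLocalResourcesBuilder();       // appends a nested builder in place
  //   b.getLocalResourcesBuilder(0);      // mutate it without an intermediate build()
  //   int n = b.getLocalResourcesCount(); // n == 1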
  private org.apache.hadoop.thirdparty.protobuf.ByteString tokens_ =
      org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
  /**
   * optional bytes tokens = 2;
   * @return Whether the tokens field is set.
   */
  @java.lang.Override
  public boolean hasTokens() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   * optional bytes tokens = 2;
   * @return The tokens.
   */
  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.ByteString getTokens() {
    return tokens_;
  }
  /**
   * optional bytes tokens = 2;
   * @param value The tokens to set.
   * @return This builder for chaining.
   */
  public Builder setTokens(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
    if (value == null) { throw new NullPointerException(); }
    tokens_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   * optional bytes tokens = 2;
   * @return This builder for chaining.
   */
  public Builder clearTokens() {
    bitField0_ = (bitField0_ & ~0x00000002);
    tokens_ = getDefaultInstance().getTokens();
    onChanged();
    return this;
  }

  private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> serviceData_ =
      java.util.Collections.emptyList();
  private void ensureServiceDataIsMutable() {
    if (!((bitField0_ & 0x00000004) != 0)) {
      serviceData_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto>(serviceData_);
      bitField0_ |= 0x00000004;
    }
  }
  private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
      org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto,
      org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder,
      org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> serviceDataBuilder_;

  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> getServiceDataList() {
    if (serviceDataBuilder_ == null) {
      return java.util.Collections.unmodifiableList(serviceData_);
    } else {
      return serviceDataBuilder_.getMessageList();
    }
  }
  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public int getServiceDataCount() {
    if (serviceDataBuilder_ == null) {
      return serviceData_.size();
    } else {
      return serviceDataBuilder_.getCount();
    }
  }
  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServiceData(int index) {
    if (serviceDataBuilder_ == null) {
      return serviceData_.get(index);
    } else {
      return serviceDataBuilder_.getMessage(index);
    }
  }
  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public Builder setServiceData(
      int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) {
    if (serviceDataBuilder_ == null) {
      if (value == null) { throw new NullPointerException(); }
      ensureServiceDataIsMutable();
      serviceData_.set(index, value);
      onChanged();
    } else {
      serviceDataBuilder_.setMessage(index, value);
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public Builder setServiceData(
      int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) {
    if (serviceDataBuilder_ == null) {
      ensureServiceDataIsMutable();
      serviceData_.set(index, builderForValue.build());
      onChanged();
    } else {
      serviceDataBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public Builder addServiceData(org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) {
    if (serviceDataBuilder_ == null) {
      if (value == null) { throw new NullPointerException(); }
      ensureServiceDataIsMutable();
      serviceData_.add(value);
      onChanged();
    } else {
      serviceDataBuilder_.addMessage(value);
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public Builder addServiceData(
      int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) {
    if (serviceDataBuilder_ == null) {
      if (value == null) { throw new NullPointerException(); }
      ensureServiceDataIsMutable();
      serviceData_.add(index, value);
      onChanged();
    } else {
      serviceDataBuilder_.addMessage(index, value);
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public Builder addServiceData(
      org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) {
    if (serviceDataBuilder_ == null) {
      ensureServiceDataIsMutable();
      serviceData_.add(builderForValue.build());
      onChanged();
    } else {
      serviceDataBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public Builder addServiceData(
      int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) {
    if (serviceDataBuilder_ == null) {
      ensureServiceDataIsMutable();
      serviceData_.add(index, builderForValue.build());
      onChanged();
    } else {
      serviceDataBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public Builder addAllServiceData(
      java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> values) {
    if (serviceDataBuilder_ == null) {
      ensureServiceDataIsMutable();
      org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
          values, serviceData_);
      onChanged();
    } else {
      serviceDataBuilder_.addAllMessages(values);
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public Builder clearServiceData() {
    if (serviceDataBuilder_ == null) {
      serviceData_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
    } else {
      serviceDataBuilder_.clear();
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public Builder removeServiceData(int index) {
    if (serviceDataBuilder_ == null) {
      ensureServiceDataIsMutable();
      serviceData_.remove(index);
      onChanged();
    } else {
      serviceDataBuilder_.remove(index);
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder getServiceDataBuilder(int index) {
    return getServiceDataFieldBuilder().getBuilder(index);
  }
  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServiceDataOrBuilder(int index) {
    if (serviceDataBuilder_ == null) {
      return serviceData_.get(index);
    } else {
      return serviceDataBuilder_.getMessageOrBuilder(index);
    }
  }
  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> getServiceDataOrBuilderList() {
    if (serviceDataBuilder_ != null) {
      return serviceDataBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(serviceData_);
    }
  }
  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder addServiceDataBuilder() {
    return getServiceDataFieldBuilder().addBuilder(
        org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.getDefaultInstance());
  }
  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder addServiceDataBuilder(int index) {
    return getServiceDataFieldBuilder().addBuilder(
        index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.getDefaultInstance());
  }
  /** repeated .hadoop.yarn.StringBytesMapProto service_data = 3; */
  public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder> getServiceDataBuilderList() {
    return getServiceDataFieldBuilder().getBuilderList();
  }
  private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
      org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto,
      org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder,
      org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder>
      getServiceDataFieldBuilder() {
    if (serviceDataBuilder_ == null) {
      serviceDataBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto,
          org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder,
          org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder>(
              serviceData_,
              ((bitField0_ & 0x00000004) != 0),
              getParentForChildren(),
              isClean());
      serviceData_ = null;
    }
    return serviceDataBuilder_;
  }

  private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> environment_ =
      java.util.Collections.emptyList();
  private void ensureEnvironmentIsMutable() {
    if (!((bitField0_ & 0x00000008) != 0)) {
      environment_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>(environment_);
      bitField0_ |= 0x00000008;
    }
  }
  private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
      org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto,
      org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder,
      org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> environmentBuilder_;

  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getEnvironmentList() {
    if (environmentBuilder_ == null) {
      return java.util.Collections.unmodifiableList(environment_);
    } else {
      return environmentBuilder_.getMessageList();
    }
  }
  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public int getEnvironmentCount() {
    if (environmentBuilder_ == null) {
      return environment_.size();
    } else {
      return environmentBuilder_.getCount();
    }
  }
  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getEnvironment(int index) {
    if (environmentBuilder_ == null) {
      return environment_.get(index);
    } else {
      return environmentBuilder_.getMessage(index);
    }
  }
  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public Builder setEnvironment(
      int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
    if (environmentBuilder_ == null) {
      if (value == null) { throw new NullPointerException(); }
      ensureEnvironmentIsMutable();
      environment_.set(index, value);
      onChanged();
    } else {
      environmentBuilder_.setMessage(index, value);
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public Builder setEnvironment(
      int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
    if (environmentBuilder_ == null) {
      ensureEnvironmentIsMutable();
      environment_.set(index, builderForValue.build());
      onChanged();
    } else {
      environmentBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public Builder addEnvironment(org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
    if (environmentBuilder_ == null) {
      if (value == null) { throw new NullPointerException(); }
      ensureEnvironmentIsMutable();
      environment_.add(value);
      onChanged();
    } else {
      environmentBuilder_.addMessage(value);
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public Builder addEnvironment(
      int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
    if (environmentBuilder_ == null) {
      if (value == null) { throw new NullPointerException(); }
      ensureEnvironmentIsMutable();
      environment_.add(index, value);
      onChanged();
    } else {
      environmentBuilder_.addMessage(index, value);
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public Builder addEnvironment(
      org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
    if (environmentBuilder_ == null) {
      ensureEnvironmentIsMutable();
      environment_.add(builderForValue.build());
      onChanged();
    } else {
      environmentBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public Builder addEnvironment(
      int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
    if (environmentBuilder_ == null) {
      ensureEnvironmentIsMutable();
      environment_.add(index, builderForValue.build());
      onChanged();
    } else {
      environmentBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public Builder addAllEnvironment(
      java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> values) {
    if (environmentBuilder_ == null) {
      ensureEnvironmentIsMutable();
      org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
          values, environment_);
      onChanged();
    } else {
      environmentBuilder_.addAllMessages(values);
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public Builder clearEnvironment() {
    if (environmentBuilder_ == null) {
      environment_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
    } else {
      environmentBuilder_.clear();
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public Builder removeEnvironment(int index) {
    if (environmentBuilder_ == null) {
      ensureEnvironmentIsMutable();
      environment_.remove(index);
      onChanged();
    } else {
      environmentBuilder_.remove(index);
    }
    return this;
  }
  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder getEnvironmentBuilder(int index) {
    return getEnvironmentFieldBuilder().getBuilder(index);
  }
  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getEnvironmentOrBuilder(int index) {
    if (environmentBuilder_ == null) {
      return environment_.get(index);
    } else {
      return environmentBuilder_.getMessageOrBuilder(index);
    }
  }
  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> getEnvironmentOrBuilderList() {
    if (environmentBuilder_ != null) {
      return environmentBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(environment_);
    }
  }
  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addEnvironmentBuilder() {
    return getEnvironmentFieldBuilder().addBuilder(
        org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
  }
  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addEnvironmentBuilder(int index) {
    return getEnvironmentFieldBuilder().addBuilder(
        index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
  }
  /** repeated .hadoop.yarn.StringStringMapProto environment = 4; */
  public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder> getEnvironmentBuilderList() {
    return getEnvironmentFieldBuilder().getBuilderList();
  }
  private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
      org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto,
      org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder,
      org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
      getEnvironmentFieldBuilder() {
    if (environmentBuilder_ == null) {
      environmentBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto,
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder,
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>(
              environment_,
              ((bitField0_ & 0x00000008) != 0),
              getParentForChildren(),
              isClean());
      environment_ = null;
    }
    return environmentBuilder_;
  }

  private org.apache.hadoop.thirdparty.protobuf.LazyStringList command_ =
      org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
  private void ensureCommandIsMutable() {
    if (!((bitField0_ & 0x00000010) != 0)) {
      command_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(command_);
      bitField0_ |= 0x00000010;
    }
  }
  /**
   * repeated string command = 5;
   * @return A list containing the command.
   */
  public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList getCommandList() {
    return command_.getUnmodifiableView();
  }
  /**
   * repeated string command = 5;
   * @return The count of command.
   */
  public int getCommandCount() {
    return command_.size();
  }
  /**
   * repeated string command = 5;
   * @param index The index of the element to return.
   * @return The command at the given index.
   */
  public java.lang.String getCommand(int index) {
    return command_.get(index);
  }
  /**
   * repeated string command = 5;
   * @param index The index of the value to return.
   * @return The bytes of the command at the given index.
   */
  public org.apache.hadoop.thirdparty.protobuf.ByteString getCommandBytes(int index) {
    return command_.getByteString(index);
  }
  /**
   * repeated string command = 5;
   * @param index The index to set the value at.
   * @param value The command to set.
   * @return This builder for chaining.
   */
  public Builder setCommand(int index, java.lang.String value) {
    if (value == null) { throw new NullPointerException(); }
    ensureCommandIsMutable();
    command_.set(index, value);
    onChanged();
    return this;
  }
  /**
   * repeated string command = 5;
   * @param value The command to add.
   * @return This builder for chaining.
   */
  public Builder addCommand(java.lang.String value) {
    if (value == null) { throw new NullPointerException(); }
    ensureCommandIsMutable();
    command_.add(value);
    onChanged();
    return this;
  }
  /**
   * repeated string command = 5;
   * @param values The command to add.
   * @return This builder for chaining.
   */
  public Builder addAllCommand(java.lang.Iterable<java.lang.String> values) {
    ensureCommandIsMutable();
    org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
        values, command_);
    onChanged();
    return this;
  }
  /**
   * repeated string command = 5;
   * @return This builder for chaining.
   */
  public Builder clearCommand() {
    command_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
    bitField0_ = (bitField0_ & ~0x00000010);
    onChanged();
    return this;
  }
  /**
   * repeated string command = 5;
   * @param value The bytes of the command to add.
   * @return This builder for chaining.
   */
  public Builder addCommandBytes(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
    if (value == null) { throw new NullPointerException(); }
    ensureCommandIsMutable();
    command_.add(value);
    onChanged();
    return this;
  }
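  // Sketch of the repeated-string accessors above (not generated code; the shell
  // command is an arbitrary example):
  //
  //   YarnProtos.ContainerLaunchContextProto.Builder b =
  //       YarnProtos.ContainerLaunchContextProto.newBuilder()
  //           .addCommand("/bin/sh").addCommand("-c").addCommand("echo hello");
  //   b.addAllCommand(java.util.Arrays.asList("1>stdout", "2>stderr"));
  //   // b.getCommandList() now holds all five strings, in order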
  private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> applicationACLs_ =
      java.util.Collections.emptyList();
  private void ensureApplicationACLsIsMutable() {
    if (!((bitField0_ & 0x00000020) != 0)) {
      applicationACLs_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto>(applicationACLs_);
      bitField0_ |= 0x00000020;
    }
  }
  private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto,
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder,
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> applicationACLsBuilder_;

  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> getApplicationACLsList() {
    if (applicationACLsBuilder_ == null) {
      return java.util.Collections.unmodifiableList(applicationACLs_);
    } else {
      return applicationACLsBuilder_.getMessageList();
    }
  }
  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public int getApplicationACLsCount() {
    if (applicationACLsBuilder_ == null) {
      return applicationACLs_.size();
    } else {
      return applicationACLsBuilder_.getCount();
    }
  }
  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getApplicationACLs(int index) {
    if (applicationACLsBuilder_ == null) {
      return applicationACLs_.get(index);
    } else {
      return applicationACLsBuilder_.getMessage(index);
    }
  }
  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public Builder setApplicationACLs(
      int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto value) {
    if (applicationACLsBuilder_ == null) {
      if (value == null) { throw new NullPointerException(); }
      ensureApplicationACLsIsMutable();
      applicationACLs_.set(index, value);
      onChanged();
    } else {
      applicationACLsBuilder_.setMessage(index, value);
    }
    return this;
  }
  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public Builder setApplicationACLs(
      int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder builderForValue) {
    if (applicationACLsBuilder_ == null) {
      ensureApplicationACLsIsMutable();
      applicationACLs_.set(index, builderForValue.build());
      onChanged();
    } else {
      applicationACLsBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }
  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public Builder addApplicationACLs(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto value) {
    if (applicationACLsBuilder_ == null) {
      if (value == null) { throw new NullPointerException(); }
      ensureApplicationACLsIsMutable();
      applicationACLs_.add(value);
      onChanged();
    } else {
      applicationACLsBuilder_.addMessage(value);
    }
    return this;
  }
  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public Builder addApplicationACLs(
      int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto value) {
    if (applicationACLsBuilder_ == null) {
      if (value == null) { throw new NullPointerException(); }
      ensureApplicationACLsIsMutable();
      applicationACLs_.add(index, value);
      onChanged();
    } else {
      applicationACLsBuilder_.addMessage(index, value);
    }
    return this;
  }
  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public Builder addApplicationACLs(
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder builderForValue) {
    if (applicationACLsBuilder_ == null) {
      ensureApplicationACLsIsMutable();
      applicationACLs_.add(builderForValue.build());
      onChanged();
    } else {
      applicationACLsBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }
  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public Builder addApplicationACLs(
      int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder builderForValue) {
    if (applicationACLsBuilder_ == null) {
      ensureApplicationACLsIsMutable();
      applicationACLs_.add(index, builderForValue.build());
      onChanged();
    } else {
      applicationACLsBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }
  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public Builder addAllApplicationACLs(
      java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> values) {
    if (applicationACLsBuilder_ == null) {
      ensureApplicationACLsIsMutable();
      org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
          values, applicationACLs_);
      onChanged();
    } else {
      applicationACLsBuilder_.addAllMessages(values);
    }
    return this;
  }
  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public Builder clearApplicationACLs() {
    if (applicationACLsBuilder_ == null) {
      applicationACLs_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000020);
      onChanged();
    } else {
      applicationACLsBuilder_.clear();
    }
    return this;
  }
  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public Builder removeApplicationACLs(int index) {
    if (applicationACLsBuilder_ == null) {
      ensureApplicationACLsIsMutable();
      applicationACLs_.remove(index);
      onChanged();
    } else {
      applicationACLsBuilder_.remove(index);
    }
    return this;
  }
  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder getApplicationACLsBuilder(int index) {
    return getApplicationACLsFieldBuilder().getBuilder(index);
  }
  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder getApplicationACLsOrBuilder(int index) {
    if (applicationACLsBuilder_ == null) {
      return applicationACLs_.get(index);
    } else {
      return applicationACLsBuilder_.getMessageOrBuilder(index);
    }
  }
  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> getApplicationACLsOrBuilderList() {
    if (applicationACLsBuilder_ != null) {
      return applicationACLsBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(applicationACLs_);
    }
  }
  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder addApplicationACLsBuilder() {
    return getApplicationACLsFieldBuilder().addBuilder(
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.getDefaultInstance());
  }
  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder addApplicationACLsBuilder(int index) {
    return getApplicationACLsFieldBuilder().addBuilder(
        index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.getDefaultInstance());
  }
  /** repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6; */
  public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder> getApplicationACLsBuilderList() {
    return getApplicationACLsFieldBuilder().getBuilderList();
  }
  private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto,
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder,
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder>
      getApplicationACLsFieldBuilder() {
    if (applicationACLsBuilder_ == null) {
      applicationACLsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto,
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder,
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder>(
              applicationACLs_,
              ((bitField0_ & 0x00000020) != 0),
              getParentForChildren(),
              isClean());
      applicationACLs_ = null;
    }
    return applicationACLsBuilder_;
  }

  private org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto containerRetryContext_;
  private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto,
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.Builder,
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProtoOrBuilder> containerRetryContextBuilder_;
  /**
   * optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;
   * @return Whether the containerRetryContext field is set.
   */
  public boolean hasContainerRetryContext() {
    return ((bitField0_ & 0x00000040) != 0);
  }
  /**
   * optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;
   * @return The containerRetryContext.
   */
  public org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto getContainerRetryContext() {
    if (containerRetryContextBuilder_ == null) {
      return containerRetryContext_ == null
          ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.getDefaultInstance()
          : containerRetryContext_;
    } else {
      return containerRetryContextBuilder_.getMessage();
    }
  }
  /** optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7; */
  public Builder setContainerRetryContext(org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto value) {
    if (containerRetryContextBuilder_ == null) {
      if (value == null) { throw new NullPointerException(); }
      containerRetryContext_ = value;
    } else {
      containerRetryContextBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000040;
    onChanged();
    return this;
  }
  /** optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7; */
  public Builder setContainerRetryContext(
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.Builder builderForValue) {
    if (containerRetryContextBuilder_ == null) {
      containerRetryContext_ = builderForValue.build();
    } else {
      containerRetryContextBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000040;
    onChanged();
    return this;
  }
  /** optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7; */
  public Builder mergeContainerRetryContext(org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto value) {
    if (containerRetryContextBuilder_ == null) {
      if (((bitField0_ & 0x00000040) != 0) &&
          containerRetryContext_ != null &&
          containerRetryContext_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.getDefaultInstance()) {
        getContainerRetryContextBuilder().mergeFrom(value);
      } else {
        containerRetryContext_ = value;
      }
    } else {
      containerRetryContextBuilder_.mergeFrom(value);
    }
    bitField0_ |= 0x00000040;
    onChanged();
    return this;
  }
  /** optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7; */
  public Builder clearContainerRetryContext() {
    bitField0_ = (bitField0_ & ~0x00000040);
    containerRetryContext_ = null;
    if (containerRetryContextBuilder_ != null) {
      containerRetryContextBuilder_.dispose();
      containerRetryContextBuilder_ = null;
    }
    onChanged();
    return this;
  }
  /** optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7; */
  public org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.Builder getContainerRetryContextBuilder() {
    bitField0_ |= 0x00000040;
    onChanged();
    return getContainerRetryContextFieldBuilder().getBuilder();
  }
  /** optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7; */
  public org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProtoOrBuilder getContainerRetryContextOrBuilder() {
    if (containerRetryContextBuilder_ != null) {
      return containerRetryContextBuilder_.getMessageOrBuilder();
    } else {
      return containerRetryContext_ == null
          ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.getDefaultInstance()
          : containerRetryContext_;
    }
  }
  /** optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7; */
  private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto,
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.Builder,
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProtoOrBuilder>
      getContainerRetryContextFieldBuilder() {
    if (containerRetryContextBuilder_ == null) {
      containerRetryContextBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto,
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.Builder,
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProtoOrBuilder>(
              getContainerRetryContext(),
              getParentForChildren(),
              isClean());
      containerRetryContext_ = null;
    }
    return containerRetryContextBuilder_;
  }

  private org.apache.hadoop.thirdparty.protobuf.ByteString tokensConf_ =
      org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
  /**
   * optional bytes tokens_conf = 8;
   * @return Whether the tokensConf field is set.
   */
  @java.lang.Override
  public boolean hasTokensConf() {
    return ((bitField0_ & 0x00000080) != 0);
  }
  /**
   * optional bytes tokens_conf = 8;
   * @return The tokensConf.
   */
  @java.lang.Override
  public org.apache.hadoop.thirdparty.protobuf.ByteString getTokensConf() {
    return tokensConf_;
  }
  /**
   * optional bytes tokens_conf = 8;
   * @param value The tokensConf to set.
   * @return This builder for chaining.
   */
  public Builder setTokensConf(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
    if (value == null) { throw new NullPointerException(); }
    tokensConf_ = value;
    bitField0_ |= 0x00000080;
    onChanged();
    return this;
  }
  /**
   * optional bytes tokens_conf = 8;
   * @return This builder for chaining.
   */
  public Builder clearTokensConf() {
    bitField0_ = (bitField0_ & ~0x00000080);
    tokensConf_ = getDefaultInstance().getTokensConf();
    onChanged();
    return this;
  }
  @java.lang.Override
  public final Builder setUnknownFields(
      final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }
  @java.lang.Override
  public final Builder mergeUnknownFields(
      final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerLaunchContextProto)
}

// @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerLaunchContextProto)
private static final org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto();
}

public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerLaunchContextProto>
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerLaunchContextProto>() {
  @java.lang.Override
  public ContainerLaunchContextProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};

public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerLaunchContextProto> parser() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerLaunchContextProto> getParserForType() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}
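// Illustrative end-to-end sketch for this message (not generated code; the command
// string and token bytes are hypothetical):
//
//   YarnProtos.ContainerLaunchContextProto clc =
//       YarnProtos.ContainerLaunchContextProto.newBuilder()
//           .addCommand("./launch.sh")
//           .setTokens(org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8("creds"))
//           .build();
//   // parser() is the supported entry point; the PARSER field is deprecated:
//   YarnProtos.ContainerLaunchContextProto copy =
//       YarnProtos.ContainerLaunchContextProto.parser().parseFrom(clc.toByteString());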
public interface ContainerStatusProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerStatusProto)
    org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

  /**
   * optional .hadoop.yarn.ContainerIdProto container_id = 1;
   * @return Whether the containerId field is set.
   */
  boolean hasContainerId();
  /**
   * optional .hadoop.yarn.ContainerIdProto container_id = 1;
   * @return The containerId.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
  /** optional .hadoop.yarn.ContainerIdProto container_id = 1; */
  org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();

  /**
   * optional .hadoop.yarn.ContainerStateProto state = 2;
   * @return Whether the state field is set.
   */
  boolean hasState();
  /**
   * optional .hadoop.yarn.ContainerStateProto state = 2;
   * @return The state.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getState();

  /**
   * optional string diagnostics = 3 [default = "N/A"];
   * @return Whether the diagnostics field is set.
   */
  boolean hasDiagnostics();
  /**
   * optional string diagnostics = 3 [default = "N/A"];
   * @return The diagnostics.
   */
  java.lang.String getDiagnostics();
  /**
   * optional string diagnostics = 3 [default = "N/A"];
   * @return The bytes for diagnostics.
   */
  org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsBytes();

  /**
   * optional int32 exit_status = 4 [default = -1000];
   * @return Whether the exitStatus field is set.
   */
  boolean hasExitStatus();
  /**
   * optional int32 exit_status = 4 [default = -1000];
   * @return The exitStatus.
   */
  int getExitStatus();

  /**
   * optional .hadoop.yarn.ResourceProto capability = 5;
   * @return Whether the capability field is set.
   */
  boolean hasCapability();
  /**
   * optional .hadoop.yarn.ResourceProto capability = 5;
   * @return The capability.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability();
  /** optional .hadoop.yarn.ResourceProto capability = 5; */
  org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder();

  /**
   * optional .hadoop.yarn.ExecutionTypeProto executionType = 6 [default = GUARANTEED];
   * @return Whether the executionType field is set.
   */
  boolean hasExecutionType();
  /**
   * optional .hadoop.yarn.ExecutionTypeProto executionType = 6 [default = GUARANTEED];
   * @return The executionType.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType();

  /** repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */
  java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getContainerAttributesList();
  /** repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */
  org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getContainerAttributes(int index);
  /** repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */
  int getContainerAttributesCount();
  /** repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */
  java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> getContainerAttributesOrBuilderList();
  /** repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */
  org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getContainerAttributesOrBuilder(int index);

  /**
   * optional .hadoop.yarn.ContainerSubStateProto container_sub_state = 8;
   * @return Whether the containerSubState field is set.
   */
  boolean hasContainerSubState();
  /**
   * optional .hadoop.yarn.ContainerSubStateProto container_sub_state = 8;
   * @return The containerSubState.
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto getContainerSubState();
}
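// Illustrative sketch (not generated code) of how proto2 field defaults in the
// following message behave when nothing is set; the default values quoted here
// come from the field declarations ([default = "N/A"], [default = -1000]):
//
//   YarnProtos.ContainerStatusProto status =
//       YarnProtos.ContainerStatusProto.getDefaultInstance();
//   status.hasDiagnostics();  // false: the field is unset...
//   status.getDiagnostics();  // ...yet returns "N/A", the declared default
//   status.getExitStatus();   // -1000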
private ContainerStatusProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private ContainerStatusProto() { state_ = 1; diagnostics_ = "N/A"; exitStatus_ = -1000; executionType_ = 1; containerAttributes_ = java.util.Collections.emptyList(); containerSubState_ = 1; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ContainerStatusProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerStatusProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerStatusProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder.class); } private int bitField0_; public static final int CONTAINER_ID_FIELD_NUMBER = 1; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_; /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; * @return Whether the containerId field is set. */ @java.lang.Override public boolean hasContainerId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; * @return The containerId. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } public static final int STATE_FIELD_NUMBER = 2; private int state_ = 1; /** * optional .hadoop.yarn.ContainerStateProto state = 2; * @return Whether the state field is set. */ @java.lang.Override public boolean hasState() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.ContainerStateProto state = 2; * @return The state. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getState() { org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.forNumber(state_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.C_NEW : result; } public static final int DIAGNOSTICS_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object diagnostics_ = "N/A"; /** * optional string diagnostics = 3 [default = "N/A"]; * @return Whether the diagnostics field is set. 
*/ @java.lang.Override public boolean hasDiagnostics() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string diagnostics = 3 [default = "N/A"]; * @return The diagnostics. */ @java.lang.Override public java.lang.String getDiagnostics() { java.lang.Object ref = diagnostics_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { diagnostics_ = s; } return s; } } /** * optional string diagnostics = 3 [default = "N/A"]; * @return The bytes for diagnostics. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsBytes() { java.lang.Object ref = diagnostics_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); diagnostics_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int EXIT_STATUS_FIELD_NUMBER = 4; private int exitStatus_ = -1000; /** * optional int32 exit_status = 4 [default = -1000]; * @return Whether the exitStatus field is set. */ @java.lang.Override public boolean hasExitStatus() { return ((bitField0_ & 0x00000008) != 0); } /** * optional int32 exit_status = 4 [default = -1000]; * @return The exitStatus. */ @java.lang.Override public int getExitStatus() { return exitStatus_; } public static final int CAPABILITY_FIELD_NUMBER = 5; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_; /** * optional .hadoop.yarn.ResourceProto capability = 5; * @return Whether the capability field is set. */ @java.lang.Override public boolean hasCapability() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.ResourceProto capability = 5; * @return The capability. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() { return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } /** * optional .hadoop.yarn.ResourceProto capability = 5; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() { return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } public static final int EXECUTIONTYPE_FIELD_NUMBER = 6; private int executionType_ = 1; /** * optional .hadoop.yarn.ExecutionTypeProto executionType = 6 [default = GUARANTEED]; * @return Whether the executionType field is set. */ @java.lang.Override public boolean hasExecutionType() { return ((bitField0_ & 0x00000020) != 0); } /** * optional .hadoop.yarn.ExecutionTypeProto executionType = 6 [default = GUARANTEED]; * @return The executionType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() { org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_); return result == null ? 
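      // Illustrative note (not part of the generated file): proto2 keeps
      // field presence separate from field value, so an unset field still
      // returns its declared default. A minimal sketch of the distinction:
      //
      //   YarnProtos.ContainerStatusProto empty =
      //       YarnProtos.ContainerStatusProto.getDefaultInstance();
      //   empty.hasDiagnostics();  // false -- never set
      //   empty.getDiagnostics();  // "N/A"  -- declared default, never null
      //   empty.hasExitStatus();   // false
      //   empty.getExitStatus();   // -1000  -- sentinel default from the .proto
      //
      // Callers should check hasX() before treating getX() as real data.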
          org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result;
  }

  public static final int CONTAINER_ATTRIBUTES_FIELD_NUMBER = 7;
  @SuppressWarnings("serial")
  private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> containerAttributes_;
  /**
   * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;
   */
  @java.lang.Override
  public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getContainerAttributesList() {
    return containerAttributes_;
  }
  /**
   * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;
   */
  @java.lang.Override
  public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
      getContainerAttributesOrBuilderList() {
    return containerAttributes_;
  }
  /**
   * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;
   */
  @java.lang.Override
  public int getContainerAttributesCount() {
    return containerAttributes_.size();
  }
  /**
   * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;
   */
  @java.lang.Override
  public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getContainerAttributes(int index) {
    return containerAttributes_.get(index);
  }
  /**
   * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;
   */
  @java.lang.Override
  public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getContainerAttributesOrBuilder(
      int index) {
    return containerAttributes_.get(index);
  }

  public static final int CONTAINER_SUB_STATE_FIELD_NUMBER = 8;
  private int containerSubState_ = 1;
  /**
   * optional .hadoop.yarn.ContainerSubStateProto container_sub_state = 8;
   * @return Whether the containerSubState field is set.
   */
  @java.lang.Override public boolean hasContainerSubState() {
    return ((bitField0_ & 0x00000040) != 0);
  }
  /**
   * optional .hadoop.yarn.ContainerSubStateProto container_sub_state = 8;
   * @return The containerSubState.
   */
  @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto getContainerSubState() {
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto.forNumber(containerSubState_);
    return result == null ?
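        // Illustrative note (not part of the generated file): forNumber()
        // yields null for a wire value this client's enum does not know, and
        // the getter then substitutes a fixed fallback (CSS_SCHEDULED here,
        // C_NEW for state above) rather than throwing, so a newer server can
        // emit enum numbers an older client has never seen:
        //
        //   // 999 is a hypothetical number from a newer schema
        //   YarnProtos.ContainerSubStateProto v =
        //       YarnProtos.ContainerSubStateProto.forNumber(999);  // null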
org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto.CSS_SCHEDULED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasCapability()) { if (!getCapability().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getContainerId()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeEnum(2, state_); } if (((bitField0_ & 0x00000004) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, diagnostics_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeInt32(4, exitStatus_); } if (((bitField0_ & 0x00000010) != 0)) { output.writeMessage(5, getCapability()); } if (((bitField0_ & 0x00000020) != 0)) { output.writeEnum(6, executionType_); } for (int i = 0; i < containerAttributes_.size(); i++) { output.writeMessage(7, containerAttributes_.get(i)); } if (((bitField0_ & 0x00000040) != 0)) { output.writeEnum(8, containerSubState_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(1, getContainerId()); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(2, state_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, diagnostics_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(4, exitStatus_); } if (((bitField0_ & 0x00000010) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(5, getCapability()); } if (((bitField0_ & 0x00000020) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(6, executionType_); } for (int i = 0; i < containerAttributes_.size(); i++) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(7, containerAttributes_.get(i)); } if (((bitField0_ & 0x00000040) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(8, containerSubState_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto) obj; if (hasContainerId() != other.hasContainerId()) return false; if (hasContainerId()) { if (!getContainerId() .equals(other.getContainerId())) return false; } if (hasState() != other.hasState()) return false; if (hasState()) { if (state_ != other.state_) return false; } if (hasDiagnostics() != other.hasDiagnostics()) return false; if (hasDiagnostics()) { if (!getDiagnostics() .equals(other.getDiagnostics())) 
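        // Illustrative sketch (not part of the generated file): writeTo()
        // and getSerializedSize() above emit each optional field only when
        // its presence bit is set, and each container_attributes element as
        // its own length-delimited record. A round trip through the standard
        // protobuf Message API:
        //
        //   byte[] wire = status.toByteArray();            // drives writeTo()
        //   YarnProtos.ContainerStatusProto copy =
        //       YarnProtos.ContainerStatusProto.parseFrom(wire);
        //   assert copy.equals(status);                    // this equals()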
return false; } if (hasExitStatus() != other.hasExitStatus()) return false; if (hasExitStatus()) { if (getExitStatus() != other.getExitStatus()) return false; } if (hasCapability() != other.hasCapability()) return false; if (hasCapability()) { if (!getCapability() .equals(other.getCapability())) return false; } if (hasExecutionType() != other.hasExecutionType()) return false; if (hasExecutionType()) { if (executionType_ != other.executionType_) return false; } if (!getContainerAttributesList() .equals(other.getContainerAttributesList())) return false; if (hasContainerSubState() != other.hasContainerSubState()) return false; if (hasContainerSubState()) { if (containerSubState_ != other.containerSubState_) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasContainerId()) { hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER; hash = (53 * hash) + getContainerId().hashCode(); } if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; hash = (53 * hash) + state_; } if (hasDiagnostics()) { hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER; hash = (53 * hash) + getDiagnostics().hashCode(); } if (hasExitStatus()) { hash = (37 * hash) + EXIT_STATUS_FIELD_NUMBER; hash = (53 * hash) + getExitStatus(); } if (hasCapability()) { hash = (37 * hash) + CAPABILITY_FIELD_NUMBER; hash = (53 * hash) + getCapability().hashCode(); } if (hasExecutionType()) { hash = (37 * hash) + EXECUTIONTYPE_FIELD_NUMBER; hash = (53 * hash) + executionType_; } if (getContainerAttributesCount() > 0) { hash = (37 * hash) + CONTAINER_ATTRIBUTES_FIELD_NUMBER; hash = (53 * hash) + getContainerAttributesList().hashCode(); } if (hasContainerSubState()) { hash = (37 * hash) + CONTAINER_SUB_STATE_FIELD_NUMBER; hash = (53 * hash) + containerSubState_; } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
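    // Illustrative sketch (not part of the generated file): the parseFrom()
    // family above accepts ByteBuffer, ByteString, byte[], InputStream and
    // CodedInputStream; parseDelimitedFrom() additionally expects a varint
    // length prefix, which suits several messages written back-to-back on a
    // single stream. A sketch, with `in` standing for a hypothetical stream:
    //
    //   YarnProtos.ContainerStatusProto next =
    //       YarnProtos.ContainerStatusProto.parseDelimitedFrom(in);
    //   // returns null at a clean end-of-stream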
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ContainerStatusProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerStatusProto) org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerStatusProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerStatusProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getContainerIdFieldBuilder(); getCapabilityFieldBuilder(); getContainerAttributesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; containerId_ = null; if (containerIdBuilder_ != null) { containerIdBuilder_.dispose(); containerIdBuilder_ = null; } state_ = 1; diagnostics_ = "N/A"; exitStatus_ = -1000; capability_ = null; if (capabilityBuilder_ != null) { capabilityBuilder_.dispose(); capabilityBuilder_ = null; } executionType_ = 1; if (containerAttributesBuilder_ == null) { containerAttributes_ = java.util.Collections.emptyList(); } else { containerAttributes_ = null; containerAttributesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000040); containerSubState_ = 1; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerStatusProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto(this); 
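      // Illustrative note (not part of the generated file): build() above
      // delegates here and then fails fast if isInitialized() is false --
      // for this message, only when a *set* capability is missing required
      // fields of ResourceProto -- while buildPartial() never throws. The
      // PARSER uses buildPartial() so a truncated or invalid stream can
      // still attach the partially parsed message to the thrown exception.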
buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto result) { if (containerAttributesBuilder_ == null) { if (((bitField0_ & 0x00000040) != 0)) { containerAttributes_ = java.util.Collections.unmodifiableList(containerAttributes_); bitField0_ = (bitField0_ & ~0x00000040); } result.containerAttributes_ = containerAttributes_; } else { result.containerAttributes_ = containerAttributesBuilder_.build(); } } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.containerId_ = containerIdBuilder_ == null ? containerId_ : containerIdBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.state_ = state_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.diagnostics_ = diagnostics_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000008) != 0)) { result.exitStatus_ = exitStatus_; to_bitField0_ |= 0x00000008; } if (((from_bitField0_ & 0x00000010) != 0)) { result.capability_ = capabilityBuilder_ == null ? capability_ : capabilityBuilder_.build(); to_bitField0_ |= 0x00000010; } if (((from_bitField0_ & 0x00000020) != 0)) { result.executionType_ = executionType_; to_bitField0_ |= 0x00000020; } if (((from_bitField0_ & 0x00000080) != 0)) { result.containerSubState_ = containerSubState_; to_bitField0_ |= 0x00000040; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.getDefaultInstance()) return this; if (other.hasContainerId()) { mergeContainerId(other.getContainerId()); } if (other.hasState()) { setState(other.getState()); } if (other.hasDiagnostics()) { diagnostics_ = other.diagnostics_; bitField0_ |= 0x00000004; onChanged(); } if (other.hasExitStatus()) { 
setExitStatus(other.getExitStatus()); } if (other.hasCapability()) { mergeCapability(other.getCapability()); } if (other.hasExecutionType()) { setExecutionType(other.getExecutionType()); } if (containerAttributesBuilder_ == null) { if (!other.containerAttributes_.isEmpty()) { if (containerAttributes_.isEmpty()) { containerAttributes_ = other.containerAttributes_; bitField0_ = (bitField0_ & ~0x00000040); } else { ensureContainerAttributesIsMutable(); containerAttributes_.addAll(other.containerAttributes_); } onChanged(); } } else { if (!other.containerAttributes_.isEmpty()) { if (containerAttributesBuilder_.isEmpty()) { containerAttributesBuilder_.dispose(); containerAttributesBuilder_ = null; containerAttributes_ = other.containerAttributes_; bitField0_ = (bitField0_ & ~0x00000040); containerAttributesBuilder_ = org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getContainerAttributesFieldBuilder() : null; } else { containerAttributesBuilder_.addAllMessages(other.containerAttributes_); } } } if (other.hasContainerSubState()) { setContainerSubState(other.getContainerSubState()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasCapability()) { if (!getCapability().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getContainerIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 16: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(2, tmpRaw); } else { state_ = tmpRaw; bitField0_ |= 0x00000002; } break; } // case 16 case 26: { diagnostics_ = input.readBytes(); bitField0_ |= 0x00000004; break; } // case 26 case 32: { exitStatus_ = input.readInt32(); bitField0_ |= 0x00000008; break; } // case 32 case 42: { input.readMessage( getCapabilityFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000010; break; } // case 42 case 48: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(6, tmpRaw); } else { executionType_ = tmpRaw; bitField0_ |= 0x00000020; } break; } // case 48 case 58: { org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto m = input.readMessage( org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.PARSER, extensionRegistry); if (containerAttributesBuilder_ == null) { ensureContainerAttributesIsMutable(); containerAttributes_.add(m); } else { containerAttributesBuilder_.addMessage(m); } break; } // case 58 case 64: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(8, tmpRaw); } else { containerSubState_ = tmpRaw; 
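              // Illustrative note (not part of the generated file): the
              // message-level mergeFrom(other) above follows the usual
              // protobuf merge rules -- scalar and enum fields set in
              // `other` overwrite, message fields merge recursively, and the
              // repeated container_attributes lists concatenate:
              //
              //   // `base` and `update` are hypothetical instances
              //   YarnProtos.ContainerStatusProto merged =
              //       base.toBuilder().mergeFrom(update).buildPartial();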
bitField0_ |= 0x00000080; } break; } // case 64 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_; /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; * @return Whether the containerId field is set. */ public boolean hasContainerId() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; * @return The containerId. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() { if (containerIdBuilder_ == null) { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } else { return containerIdBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } containerId_ = value; } else { containerIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder setContainerId( org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) { if (containerIdBuilder_ == null) { containerId_ = builderForValue.build(); } else { containerIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder mergeContainerId(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) { if (containerIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && containerId_ != null && containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) { getContainerIdBuilder().mergeFrom(value); } else { containerId_ = value; } } else { containerIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public Builder clearContainerId() { bitField0_ = (bitField0_ & ~0x00000001); containerId_ = null; if (containerIdBuilder_ != null) { containerIdBuilder_.dispose(); containerIdBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getContainerIdFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() { if 
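        // Illustrative note (not part of the generated file): container_id
        // follows the lazy SingleFieldBuilderV3 pattern -- the value lives
        // in containerId_ until a nested builder is first requested:
        //
        //   YarnProtos.ContainerIdProto.Builder idb =
        //       statusBuilder.getContainerIdBuilder(); // sets the presence bit
        //   // edits made via idb propagate to the parent through onChanged()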
(containerIdBuilder_ != null) { return containerIdBuilder_.getMessageOrBuilder(); } else { return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_; } } /** * optional .hadoop.yarn.ContainerIdProto container_id = 1; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> getContainerIdFieldBuilder() { if (containerIdBuilder_ == null) { containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>( getContainerId(), getParentForChildren(), isClean()); containerId_ = null; } return containerIdBuilder_; } private int state_ = 1; /** * optional .hadoop.yarn.ContainerStateProto state = 2; * @return Whether the state field is set. */ @java.lang.Override public boolean hasState() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.ContainerStateProto state = 2; * @return The state. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getState() { org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.forNumber(state_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.C_NEW : result; } /** * optional .hadoop.yarn.ContainerStateProto state = 2; * @param value The state to set. * @return This builder for chaining. */ public Builder setState(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; state_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.ContainerStateProto state = 2; * @return This builder for chaining. */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000002); state_ = 1; onChanged(); return this; } private java.lang.Object diagnostics_ = "N/A"; /** * optional string diagnostics = 3 [default = "N/A"]; * @return Whether the diagnostics field is set. */ public boolean hasDiagnostics() { return ((bitField0_ & 0x00000004) != 0); } /** * optional string diagnostics = 3 [default = "N/A"]; * @return The diagnostics. */ public java.lang.String getDiagnostics() { java.lang.Object ref = diagnostics_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { diagnostics_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string diagnostics = 3 [default = "N/A"]; * @return The bytes for diagnostics. 
*/ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getDiagnosticsBytes() { java.lang.Object ref = diagnostics_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); diagnostics_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string diagnostics = 3 [default = "N/A"]; * @param value The diagnostics to set. * @return This builder for chaining. */ public Builder setDiagnostics( java.lang.String value) { if (value == null) { throw new NullPointerException(); } diagnostics_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional string diagnostics = 3 [default = "N/A"]; * @return This builder for chaining. */ public Builder clearDiagnostics() { diagnostics_ = getDefaultInstance().getDiagnostics(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * optional string diagnostics = 3 [default = "N/A"]; * @param value The bytes for diagnostics to set. * @return This builder for chaining. */ public Builder setDiagnosticsBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } diagnostics_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private int exitStatus_ = -1000; /** * optional int32 exit_status = 4 [default = -1000]; * @return Whether the exitStatus field is set. */ @java.lang.Override public boolean hasExitStatus() { return ((bitField0_ & 0x00000008) != 0); } /** * optional int32 exit_status = 4 [default = -1000]; * @return The exitStatus. */ @java.lang.Override public int getExitStatus() { return exitStatus_; } /** * optional int32 exit_status = 4 [default = -1000]; * @param value The exitStatus to set. * @return This builder for chaining. */ public Builder setExitStatus(int value) { exitStatus_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional int32 exit_status = 4 [default = -1000]; * @return This builder for chaining. */ public Builder clearExitStatus() { bitField0_ = (bitField0_ & ~0x00000008); exitStatus_ = -1000; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> capabilityBuilder_; /** * optional .hadoop.yarn.ResourceProto capability = 5; * @return Whether the capability field is set. */ public boolean hasCapability() { return ((bitField0_ & 0x00000010) != 0); } /** * optional .hadoop.yarn.ResourceProto capability = 5; * @return The capability. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() { if (capabilityBuilder_ == null) { return capability_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } else { return capabilityBuilder_.getMessage(); } } /** * optional .hadoop.yarn.ResourceProto capability = 5; */ public Builder setCapability(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (capabilityBuilder_ == null) { if (value == null) { throw new NullPointerException(); } capability_ = value; } else { capabilityBuilder_.setMessage(value); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto capability = 5; */ public Builder setCapability( org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) { if (capabilityBuilder_ == null) { capability_ = builderForValue.build(); } else { capabilityBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto capability = 5; */ public Builder mergeCapability(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) { if (capabilityBuilder_ == null) { if (((bitField0_ & 0x00000010) != 0) && capability_ != null && capability_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) { getCapabilityBuilder().mergeFrom(value); } else { capability_ = value; } } else { capabilityBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto capability = 5; */ public Builder clearCapability() { bitField0_ = (bitField0_ & ~0x00000010); capability_ = null; if (capabilityBuilder_ != null) { capabilityBuilder_.dispose(); capabilityBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.ResourceProto capability = 5; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getCapabilityBuilder() { bitField0_ |= 0x00000010; onChanged(); return getCapabilityFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.ResourceProto capability = 5; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() { if (capabilityBuilder_ != null) { return capabilityBuilder_.getMessageOrBuilder(); } else { return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_; } } /** * optional .hadoop.yarn.ResourceProto capability = 5; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> getCapabilityFieldBuilder() { if (capabilityBuilder_ == null) { capabilityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>( getCapability(), getParentForChildren(), isClean()); capability_ = null; } return capabilityBuilder_; } private int executionType_ = 1; /** * optional .hadoop.yarn.ExecutionTypeProto executionType = 6 [default = GUARANTEED]; * @return Whether the executionType field is set. 
*/ @java.lang.Override public boolean hasExecutionType() { return ((bitField0_ & 0x00000020) != 0); } /** * optional .hadoop.yarn.ExecutionTypeProto executionType = 6 [default = GUARANTEED]; * @return The executionType. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() { org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result; } /** * optional .hadoop.yarn.ExecutionTypeProto executionType = 6 [default = GUARANTEED]; * @param value The executionType to set. * @return This builder for chaining. */ public Builder setExecutionType(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; executionType_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.ExecutionTypeProto executionType = 6 [default = GUARANTEED]; * @return This builder for chaining. */ public Builder clearExecutionType() { bitField0_ = (bitField0_ & ~0x00000020); executionType_ = 1; onChanged(); return this; } private java.util.List containerAttributes_ = java.util.Collections.emptyList(); private void ensureContainerAttributesIsMutable() { if (!((bitField0_ & 0x00000040) != 0)) { containerAttributes_ = new java.util.ArrayList(containerAttributes_); bitField0_ |= 0x00000040; } } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> containerAttributesBuilder_; /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ public java.util.List getContainerAttributesList() { if (containerAttributesBuilder_ == null) { return java.util.Collections.unmodifiableList(containerAttributes_); } else { return containerAttributesBuilder_.getMessageList(); } } /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ public int getContainerAttributesCount() { if (containerAttributesBuilder_ == null) { return containerAttributes_.size(); } else { return containerAttributesBuilder_.getCount(); } } /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getContainerAttributes(int index) { if (containerAttributesBuilder_ == null) { return containerAttributes_.get(index); } else { return containerAttributesBuilder_.getMessage(index); } } /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ public Builder setContainerAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) { if (containerAttributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerAttributesIsMutable(); containerAttributes_.set(index, value); onChanged(); } else { containerAttributesBuilder_.setMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ public Builder setContainerAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) { if 
(containerAttributesBuilder_ == null) { ensureContainerAttributesIsMutable(); containerAttributes_.set(index, builderForValue.build()); onChanged(); } else { containerAttributesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ public Builder addContainerAttributes(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) { if (containerAttributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerAttributesIsMutable(); containerAttributes_.add(value); onChanged(); } else { containerAttributesBuilder_.addMessage(value); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ public Builder addContainerAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) { if (containerAttributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureContainerAttributesIsMutable(); containerAttributes_.add(index, value); onChanged(); } else { containerAttributesBuilder_.addMessage(index, value); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ public Builder addContainerAttributes( org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) { if (containerAttributesBuilder_ == null) { ensureContainerAttributesIsMutable(); containerAttributes_.add(builderForValue.build()); onChanged(); } else { containerAttributesBuilder_.addMessage(builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ public Builder addContainerAttributes( int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) { if (containerAttributesBuilder_ == null) { ensureContainerAttributesIsMutable(); containerAttributes_.add(index, builderForValue.build()); onChanged(); } else { containerAttributesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ public Builder addAllContainerAttributes( java.lang.Iterable values) { if (containerAttributesBuilder_ == null) { ensureContainerAttributesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, containerAttributes_); onChanged(); } else { containerAttributesBuilder_.addAllMessages(values); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ public Builder clearContainerAttributes() { if (containerAttributesBuilder_ == null) { containerAttributes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000040); onChanged(); } else { containerAttributesBuilder_.clear(); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ public Builder removeContainerAttributes(int index) { if (containerAttributesBuilder_ == null) { ensureContainerAttributesIsMutable(); containerAttributes_.remove(index); onChanged(); } else { containerAttributesBuilder_.remove(index); } return this; } /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder getContainerAttributesBuilder( int index) { return getContainerAttributesFieldBuilder().getBuilder(index); } /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ 
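  // Illustrative sketch (not part of the generated file): the repeated
  // container_attributes field exposes the full mutator family -- set, add,
  // addAll, remove, clear -- plus per-element builders:
  //
  //   YarnProtos.StringStringMapProto.Builder attr =
  //       statusBuilder.addContainerAttributesBuilder(); // appended in place
  //   statusBuilder.getContainerAttributesCount();       // now 1
  //   statusBuilder.removeContainerAttributes(0);        // and back to 0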
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getContainerAttributesOrBuilder( int index) { if (containerAttributesBuilder_ == null) { return containerAttributes_.get(index); } else { return containerAttributesBuilder_.getMessageOrBuilder(index); } } /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ public java.util.List getContainerAttributesOrBuilderList() { if (containerAttributesBuilder_ != null) { return containerAttributesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(containerAttributes_); } } /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addContainerAttributesBuilder() { return getContainerAttributesFieldBuilder().addBuilder( org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addContainerAttributesBuilder( int index) { return getContainerAttributesFieldBuilder().addBuilder( index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance()); } /** * repeated .hadoop.yarn.StringStringMapProto container_attributes = 7; */ public java.util.List getContainerAttributesBuilderList() { return getContainerAttributesFieldBuilder().getBuilderList(); } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> getContainerAttributesFieldBuilder() { if (containerAttributesBuilder_ == null) { containerAttributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>( containerAttributes_, ((bitField0_ & 0x00000040) != 0), getParentForChildren(), isClean()); containerAttributes_ = null; } return containerAttributesBuilder_; } private int containerSubState_ = 1; /** * optional .hadoop.yarn.ContainerSubStateProto container_sub_state = 8; * @return Whether the containerSubState field is set. */ @java.lang.Override public boolean hasContainerSubState() { return ((bitField0_ & 0x00000080) != 0); } /** * optional .hadoop.yarn.ContainerSubStateProto container_sub_state = 8; * @return The containerSubState. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto getContainerSubState() { org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto.forNumber(containerSubState_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto.CSS_SCHEDULED : result; } /** * optional .hadoop.yarn.ContainerSubStateProto container_sub_state = 8; * @param value The containerSubState to set. * @return This builder for chaining. 
*/ public Builder setContainerSubState(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000080; containerSubState_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.ContainerSubStateProto container_sub_state = 8; * @return This builder for chaining. */ public Builder clearContainerSubState() { bitField0_ = (bitField0_ & ~0x00000080); containerSubState_ = 1; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerStatusProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerStatusProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ContainerStatusProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ContainerRetryContextProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerRetryContextProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional .hadoop.yarn.ContainerRetryPolicyProto retry_policy = 1 [default = NEVER_RETRY]; * @return Whether the retryPolicy field is set. */ boolean hasRetryPolicy(); /** * optional .hadoop.yarn.ContainerRetryPolicyProto retry_policy = 1 [default = NEVER_RETRY]; * @return The retryPolicy. 
   */
  org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto getRetryPolicy();

  /**
   * repeated int32 error_codes = 2;
   * @return A list containing the errorCodes.
   */
  java.util.List<java.lang.Integer> getErrorCodesList();
  /**
   * repeated int32 error_codes = 2;
   * @return The count of errorCodes.
   */
  int getErrorCodesCount();
  /**
   * repeated int32 error_codes = 2;
   * @param index The index of the element to return.
   * @return The errorCodes at the given index.
   */
  int getErrorCodes(int index);

  /**
   * optional int32 max_retries = 3 [default = 0];
   * @return Whether the maxRetries field is set.
   */
  boolean hasMaxRetries();
  /**
   * optional int32 max_retries = 3 [default = 0];
   * @return The maxRetries.
   */
  int getMaxRetries();

  /**
   * optional int32 retry_interval = 4 [default = 0];
   * @return Whether the retryInterval field is set.
   */
  boolean hasRetryInterval();
  /**
   * optional int32 retry_interval = 4 [default = 0];
   * @return The retryInterval.
   */
  int getRetryInterval();

  /**
   * optional int64 failures_validity_interval = 5 [default = -1];
   * @return Whether the failuresValidityInterval field is set.
   */
  boolean hasFailuresValidityInterval();
  /**
   * optional int64 failures_validity_interval = 5 [default = -1];
   * @return The failuresValidityInterval.
   */
  long getFailuresValidityInterval();
}
/**
 * Protobuf type {@code hadoop.yarn.ContainerRetryContextProto}
 */
public static final class ContainerRetryContextProto extends
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerRetryContextProto)
    ContainerRetryContextProtoOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ContainerRetryContextProto.newBuilder() to construct.
  private ContainerRetryContextProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ContainerRetryContextProto() {
    retryPolicy_ = 0;
    errorCodes_ = emptyIntList();
    failuresValidityInterval_ = -1L;
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new ContainerRetryContextProto();
  }

  @java.lang.Override
  public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerRetryContextProto_descriptor;
  }

  @java.lang.Override
  protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerRetryContextProto_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.Builder.class);
  }

  private int bitField0_;
  public static final int RETRY_POLICY_FIELD_NUMBER = 1;
  private int retryPolicy_ = 0;
  /**
   * optional .hadoop.yarn.ContainerRetryPolicyProto retry_policy = 1 [default = NEVER_RETRY];
   * @return Whether the retryPolicy field is set.
   */
  @java.lang.Override public boolean hasRetryPolicy() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * optional .hadoop.yarn.ContainerRetryPolicyProto retry_policy = 1 [default = NEVER_RETRY];
   * @return The retryPolicy.
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto getRetryPolicy() { org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto.forNumber(retryPolicy_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto.NEVER_RETRY : result; } public static final int ERROR_CODES_FIELD_NUMBER = 2; @SuppressWarnings("serial") private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.IntList errorCodes_; /** * repeated int32 error_codes = 2; * @return A list containing the errorCodes. */ @java.lang.Override public java.util.List getErrorCodesList() { return errorCodes_; } /** * repeated int32 error_codes = 2; * @return The count of errorCodes. */ public int getErrorCodesCount() { return errorCodes_.size(); } /** * repeated int32 error_codes = 2; * @param index The index of the element to return. * @return The errorCodes at the given index. */ public int getErrorCodes(int index) { return errorCodes_.getInt(index); } public static final int MAX_RETRIES_FIELD_NUMBER = 3; private int maxRetries_ = 0; /** * optional int32 max_retries = 3 [default = 0]; * @return Whether the maxRetries field is set. */ @java.lang.Override public boolean hasMaxRetries() { return ((bitField0_ & 0x00000002) != 0); } /** * optional int32 max_retries = 3 [default = 0]; * @return The maxRetries. */ @java.lang.Override public int getMaxRetries() { return maxRetries_; } public static final int RETRY_INTERVAL_FIELD_NUMBER = 4; private int retryInterval_ = 0; /** * optional int32 retry_interval = 4 [default = 0]; * @return Whether the retryInterval field is set. */ @java.lang.Override public boolean hasRetryInterval() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int32 retry_interval = 4 [default = 0]; * @return The retryInterval. */ @java.lang.Override public int getRetryInterval() { return retryInterval_; } public static final int FAILURES_VALIDITY_INTERVAL_FIELD_NUMBER = 5; private long failuresValidityInterval_ = -1L; /** * optional int64 failures_validity_interval = 5 [default = -1]; * @return Whether the failuresValidityInterval field is set. */ @java.lang.Override public boolean hasFailuresValidityInterval() { return ((bitField0_ & 0x00000008) != 0); } /** * optional int64 failures_validity_interval = 5 [default = -1]; * @return The failuresValidityInterval. 
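   * <p>Note, added commentary rather than protoc output: the [default = -1]
   * here is commonly read as a sentinel meaning failures never age out of the
   * retry window; that interpretation comes from YARN's ContainerRetryContext
   * API, not from this file, so treat it as an assumption.
   * <pre>{@code
   * // hasFailuresValidityInterval() separates an explicit value from the
   * // -1 fallback that getFailuresValidityInterval() returns when unset.
   * long window = proto.hasFailuresValidityInterval()
   *     ? proto.getFailuresValidityInterval()
   *     : -1L;
   * }</pre>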
*/ @java.lang.Override public long getFailuresValidityInterval() { return failuresValidityInterval_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeEnum(1, retryPolicy_); } for (int i = 0; i < errorCodes_.size(); i++) { output.writeInt32(2, errorCodes_.getInt(i)); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt32(3, maxRetries_); } if (((bitField0_ & 0x00000004) != 0)) { output.writeInt32(4, retryInterval_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeInt64(5, failuresValidityInterval_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeEnumSize(1, retryPolicy_); } { int dataSize = 0; for (int i = 0; i < errorCodes_.size(); i++) { dataSize += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32SizeNoTag(errorCodes_.getInt(i)); } size += dataSize; size += 1 * getErrorCodesList().size(); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(3, maxRetries_); } if (((bitField0_ & 0x00000004) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt32Size(4, retryInterval_); } if (((bitField0_ & 0x00000008) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeInt64Size(5, failuresValidityInterval_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto) obj; if (hasRetryPolicy() != other.hasRetryPolicy()) return false; if (hasRetryPolicy()) { if (retryPolicy_ != other.retryPolicy_) return false; } if (!getErrorCodesList() .equals(other.getErrorCodesList())) return false; if (hasMaxRetries() != other.hasMaxRetries()) return false; if (hasMaxRetries()) { if (getMaxRetries() != other.getMaxRetries()) return false; } if (hasRetryInterval() != other.hasRetryInterval()) return false; if (hasRetryInterval()) { if (getRetryInterval() != other.getRetryInterval()) return false; } if (hasFailuresValidityInterval() != other.hasFailuresValidityInterval()) return false; if (hasFailuresValidityInterval()) { if (getFailuresValidityInterval() != other.getFailuresValidityInterval()) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasRetryPolicy()) { hash = (37 * hash) + RETRY_POLICY_FIELD_NUMBER; hash = (53 * hash) + retryPolicy_; } if (getErrorCodesCount() > 0) { hash = (37 * hash) + 
ERROR_CODES_FIELD_NUMBER; hash = (53 * hash) + getErrorCodesList().hashCode(); } if (hasMaxRetries()) { hash = (37 * hash) + MAX_RETRIES_FIELD_NUMBER; hash = (53 * hash) + getMaxRetries(); } if (hasRetryInterval()) { hash = (37 * hash) + RETRY_INTERVAL_FIELD_NUMBER; hash = (53 * hash) + getRetryInterval(); } if (hasFailuresValidityInterval()) { hash = (37 * hash) + FAILURES_VALIDITY_INTERVAL_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong( getFailuresValidityInterval()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseDelimitedFrom( java.io.InputStream 
input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.ContainerRetryContextProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerRetryContextProto) org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerRetryContextProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerRetryContextProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; retryPolicy_ = 0; errorCodes_ = emptyIntList(); maxRetries_ = 0; retryInterval_ = 0; failuresValidityInterval_ = -1L; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerRetryContextProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto 
getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto build() { org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto result) { if (((bitField0_ & 0x00000002) != 0)) { errorCodes_.makeImmutable(); bitField0_ = (bitField0_ & ~0x00000002); } result.errorCodes_ = errorCodes_; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.retryPolicy_ = retryPolicy_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.maxRetries_ = maxRetries_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000008) != 0)) { result.retryInterval_ = retryInterval_; to_bitField0_ |= 0x00000004; } if (((from_bitField0_ & 0x00000010) != 0)) { result.failuresValidityInterval_ = failuresValidityInterval_; to_bitField0_ |= 0x00000008; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.getDefaultInstance()) return this; if (other.hasRetryPolicy()) { setRetryPolicy(other.getRetryPolicy()); } if (!other.errorCodes_.isEmpty()) { if 
(errorCodes_.isEmpty()) { errorCodes_ = other.errorCodes_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureErrorCodesIsMutable(); errorCodes_.addAll(other.errorCodes_); } onChanged(); } if (other.hasMaxRetries()) { setMaxRetries(other.getMaxRetries()); } if (other.hasRetryInterval()) { setRetryInterval(other.getRetryInterval()); } if (other.hasFailuresValidityInterval()) { setFailuresValidityInterval(other.getFailuresValidityInterval()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int tmpRaw = input.readEnum(); org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto tmpValue = org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto.forNumber(tmpRaw); if (tmpValue == null) { mergeUnknownVarintField(1, tmpRaw); } else { retryPolicy_ = tmpRaw; bitField0_ |= 0x00000001; } break; } // case 8 case 16: { int v = input.readInt32(); ensureErrorCodesIsMutable(); errorCodes_.addInt(v); break; } // case 16 case 18: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); ensureErrorCodesIsMutable(); while (input.getBytesUntilLimit() > 0) { errorCodes_.addInt(input.readInt32()); } input.popLimit(limit); break; } // case 18 case 24: { maxRetries_ = input.readInt32(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { retryInterval_ = input.readInt32(); bitField0_ |= 0x00000008; break; } // case 32 case 40: { failuresValidityInterval_ = input.readInt64(); bitField0_ |= 0x00000010; break; } // case 40 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int retryPolicy_ = 0; /** * optional .hadoop.yarn.ContainerRetryPolicyProto retry_policy = 1 [default = NEVER_RETRY]; * @return Whether the retryPolicy field is set. */ @java.lang.Override public boolean hasRetryPolicy() { return ((bitField0_ & 0x00000001) != 0); } /** * optional .hadoop.yarn.ContainerRetryPolicyProto retry_policy = 1 [default = NEVER_RETRY]; * @return The retryPolicy. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto getRetryPolicy() { org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto.forNumber(retryPolicy_); return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto.NEVER_RETRY : result; } /** * optional .hadoop.yarn.ContainerRetryPolicyProto retry_policy = 1 [default = NEVER_RETRY]; * @param value The retryPolicy to set. * @return This builder for chaining. 
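   * <p>Note, added commentary rather than protoc output: a sketch of the whole
   * Builder round trip; every method below is declared in this Builder, the
   * error code 137 is purely illustrative, and NEVER_RETRY is simply the only
   * retry-policy constant visible in this excerpt (the enum's other constants
   * are defined elsewhere in this file):
   * <pre>{@code
   * ContainerRetryContextProto retryCtx =
   *     ContainerRetryContextProto.newBuilder()
   *         .setRetryPolicy(ContainerRetryPolicyProto.NEVER_RETRY)
   *         .addErrorCodes(137)      // repeated int32 error_codes = 2
   *         .setMaxRetries(3)        // optional int32 max_retries = 3
   *         .setRetryInterval(1000)  // optional int32 retry_interval = 4
   *         .build();
   * }</pre>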
*/ public Builder setRetryPolicy(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; retryPolicy_ = value.getNumber(); onChanged(); return this; } /** * optional .hadoop.yarn.ContainerRetryPolicyProto retry_policy = 1 [default = NEVER_RETRY]; * @return This builder for chaining. */ public Builder clearRetryPolicy() { bitField0_ = (bitField0_ & ~0x00000001); retryPolicy_ = 0; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.IntList errorCodes_ = emptyIntList(); private void ensureErrorCodesIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { errorCodes_ = mutableCopy(errorCodes_); bitField0_ |= 0x00000002; } } /** * repeated int32 error_codes = 2; * @return A list containing the errorCodes. */ public java.util.List getErrorCodesList() { return ((bitField0_ & 0x00000002) != 0) ? java.util.Collections.unmodifiableList(errorCodes_) : errorCodes_; } /** * repeated int32 error_codes = 2; * @return The count of errorCodes. */ public int getErrorCodesCount() { return errorCodes_.size(); } /** * repeated int32 error_codes = 2; * @param index The index of the element to return. * @return The errorCodes at the given index. */ public int getErrorCodes(int index) { return errorCodes_.getInt(index); } /** * repeated int32 error_codes = 2; * @param index The index to set the value at. * @param value The errorCodes to set. * @return This builder for chaining. */ public Builder setErrorCodes( int index, int value) { ensureErrorCodesIsMutable(); errorCodes_.setInt(index, value); onChanged(); return this; } /** * repeated int32 error_codes = 2; * @param value The errorCodes to add. * @return This builder for chaining. */ public Builder addErrorCodes(int value) { ensureErrorCodesIsMutable(); errorCodes_.addInt(value); onChanged(); return this; } /** * repeated int32 error_codes = 2; * @param values The errorCodes to add. * @return This builder for chaining. */ public Builder addAllErrorCodes( java.lang.Iterable values) { ensureErrorCodesIsMutable(); org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll( values, errorCodes_); onChanged(); return this; } /** * repeated int32 error_codes = 2; * @return This builder for chaining. */ public Builder clearErrorCodes() { errorCodes_ = emptyIntList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } private int maxRetries_ ; /** * optional int32 max_retries = 3 [default = 0]; * @return Whether the maxRetries field is set. */ @java.lang.Override public boolean hasMaxRetries() { return ((bitField0_ & 0x00000004) != 0); } /** * optional int32 max_retries = 3 [default = 0]; * @return The maxRetries. */ @java.lang.Override public int getMaxRetries() { return maxRetries_; } /** * optional int32 max_retries = 3 [default = 0]; * @param value The maxRetries to set. * @return This builder for chaining. */ public Builder setMaxRetries(int value) { maxRetries_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * optional int32 max_retries = 3 [default = 0]; * @return This builder for chaining. */ public Builder clearMaxRetries() { bitField0_ = (bitField0_ & ~0x00000004); maxRetries_ = 0; onChanged(); return this; } private int retryInterval_ ; /** * optional int32 retry_interval = 4 [default = 0]; * @return Whether the retryInterval field is set. 
*/ @java.lang.Override public boolean hasRetryInterval() { return ((bitField0_ & 0x00000008) != 0); } /** * optional int32 retry_interval = 4 [default = 0]; * @return The retryInterval. */ @java.lang.Override public int getRetryInterval() { return retryInterval_; } /** * optional int32 retry_interval = 4 [default = 0]; * @param value The retryInterval to set. * @return This builder for chaining. */ public Builder setRetryInterval(int value) { retryInterval_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * optional int32 retry_interval = 4 [default = 0]; * @return This builder for chaining. */ public Builder clearRetryInterval() { bitField0_ = (bitField0_ & ~0x00000008); retryInterval_ = 0; onChanged(); return this; } private long failuresValidityInterval_ = -1L; /** * optional int64 failures_validity_interval = 5 [default = -1]; * @return Whether the failuresValidityInterval field is set. */ @java.lang.Override public boolean hasFailuresValidityInterval() { return ((bitField0_ & 0x00000010) != 0); } /** * optional int64 failures_validity_interval = 5 [default = -1]; * @return The failuresValidityInterval. */ @java.lang.Override public long getFailuresValidityInterval() { return failuresValidityInterval_; } /** * optional int64 failures_validity_interval = 5 [default = -1]; * @param value The failuresValidityInterval to set. * @return This builder for chaining. */ public Builder setFailuresValidityInterval(long value) { failuresValidityInterval_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } /** * optional int64 failures_validity_interval = 5 [default = -1]; * @return This builder for chaining. */ public Builder clearFailuresValidityInterval() { bitField0_ = (bitField0_ & ~0x00000010); failuresValidityInterval_ = -1L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerRetryContextProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerRetryContextProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public ContainerRetryContextProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch 
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface StringLocalResourceMapProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.StringLocalResourceMapProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional string key = 1; * @return Whether the key field is set. */ boolean hasKey(); /** * optional string key = 1; * @return The key. */ java.lang.String getKey(); /** * optional string key = 1; * @return The bytes for key. */ org.apache.hadoop.thirdparty.protobuf.ByteString getKeyBytes(); /** * optional .hadoop.yarn.LocalResourceProto value = 2; * @return Whether the value field is set. */ boolean hasValue(); /** * optional .hadoop.yarn.LocalResourceProto value = 2; * @return The value. */ org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto getValue(); /** * optional .hadoop.yarn.LocalResourceProto value = 2; */ org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder getValueOrBuilder(); } /** *
   * <pre>
   * //////////////////////////////////////////////////////////////////////
   * //// From common//////////////////////////////////////////////////////
   * //////////////////////////////////////////////////////////////////////
   * </pre>
   *
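   * <p>Note, added commentary rather than protoc output: this message models a
   * single string-to-LocalResource map entry (this proto2 schema predates
   * native map fields), so a complete map travels as a repeated list of these
   * entries. A sketch, assuming an already-built
   * {@code LocalResourceProto resource}:
   * <pre>{@code
   * StringLocalResourceMapProto entry =
   *     StringLocalResourceMapProto.newBuilder()
   *         .setKey("app.jar")    // optional string key = 1
   *         .setValue(resource)   // optional .hadoop.yarn.LocalResourceProto value = 2
   *         .build();
   * }</pre>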
* * Protobuf type {@code hadoop.yarn.StringLocalResourceMapProto} */ public static final class StringLocalResourceMapProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.StringLocalResourceMapProto) StringLocalResourceMapProtoOrBuilder { private static final long serialVersionUID = 0L; // Use StringLocalResourceMapProto.newBuilder() to construct. private StringLocalResourceMapProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private StringLocalResourceMapProto() { key_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new StringLocalResourceMapProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLocalResourceMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLocalResourceMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder.class); } private int bitField0_; public static final int KEY_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object key_ = ""; /** * optional string key = 1; * @return Whether the key field is set. */ @java.lang.Override public boolean hasKey() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string key = 1; * @return The key. */ @java.lang.Override public java.lang.String getKey() { java.lang.Object ref = key_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { key_ = s; } return s; } } /** * optional string key = 1; * @return The bytes for key. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getKeyBytes() { java.lang.Object ref = key_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); key_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int VALUE_FIELD_NUMBER = 2; private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto value_; /** * optional .hadoop.yarn.LocalResourceProto value = 2; * @return Whether the value field is set. */ @java.lang.Override public boolean hasValue() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.LocalResourceProto value = 2; * @return The value. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto getValue() { return value_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance() : value_; } /** * optional .hadoop.yarn.LocalResourceProto value = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder getValueOrBuilder() { return value_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance() : value_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, key_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getValue()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, key_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getValue()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto) obj; if (hasKey() != other.hasKey()) return false; if (hasKey()) { if (!getKey() .equals(other.getKey())) return false; } if (hasValue() != other.hasValue()) return false; if (hasValue()) { if (!getValue() .equals(other.getValue())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasKey()) { hash = (37 * hash) + KEY_FIELD_NUMBER; hash = (53 * hash) + getKey().hashCode(); } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws 
org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == 
DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** *
     * <pre>
     * //////////////////////////////////////////////////////////////////////
     * //// From common//////////////////////////////////////////////////////
     * //////////////////////////////////////////////////////////////////////
     * </pre>
     *
* * Protobuf type {@code hadoop.yarn.StringLocalResourceMapProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.StringLocalResourceMapProto) org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLocalResourceMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLocalResourceMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getValueFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; key_ = ""; value_ = null; if (valueBuilder_ != null) { valueBuilder_.dispose(); valueBuilder_ = null; } return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLocalResourceMapProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto build() { org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.key_ = key_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.value_ = valueBuilder_ == null ? 
value_ : valueBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.getDefaultInstance()) return this; if (other.hasKey()) { key_ = other.key_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasValue()) { mergeValue(other.getValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { key_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getValueFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object key_ = ""; /** * optional string key = 1; * @return Whether the key field is set. */ public boolean hasKey() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string key = 1; * @return The key. 
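   * <p>Note, added commentary rather than protoc output: {@code key_} holds
   * either a String or a ByteString; the getter below decodes lazily and caches
   * the decoded String only when the bytes are valid UTF-8. Both setter
   * flavors declared further down write the same field:
   * <pre>{@code
   * builder.setKey("log4j.properties");                      // String form
   * builder.setKeyBytes(ByteString.copyFromUtf8("app.jar")); // raw-bytes form
   * }</pre>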
*/ public java.lang.String getKey() { java.lang.Object ref = key_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { key_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string key = 1; * @return The bytes for key. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getKeyBytes() { java.lang.Object ref = key_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); key_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string key = 1; * @param value The key to set. * @return This builder for chaining. */ public Builder setKey( java.lang.String value) { if (value == null) { throw new NullPointerException(); } key_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional string key = 1; * @return This builder for chaining. */ public Builder clearKey() { key_ = getDefaultInstance().getKey(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * optional string key = 1; * @param value The bytes for key to set. * @return This builder for chaining. */ public Builder setKeyBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } key_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto value_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder> valueBuilder_; /** * optional .hadoop.yarn.LocalResourceProto value = 2; * @return Whether the value field is set. */ public boolean hasValue() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.yarn.LocalResourceProto value = 2; * @return The value. */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto getValue() { if (valueBuilder_ == null) { return value_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance() : value_; } else { return valueBuilder_.getMessage(); } } /** * optional .hadoop.yarn.LocalResourceProto value = 2; */ public Builder setValue(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto value) { if (valueBuilder_ == null) { if (value == null) { throw new NullPointerException(); } value_ = value; } else { valueBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.LocalResourceProto value = 2; */ public Builder setValue( org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder builderForValue) { if (valueBuilder_ == null) { value_ = builderForValue.build(); } else { valueBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.LocalResourceProto value = 2; */ public Builder mergeValue(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto value) { if (valueBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && value_ != null && value_ != org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance()) { getValueBuilder().mergeFrom(value); } else { value_ = value; } } else { valueBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.yarn.LocalResourceProto value = 2; */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000002); value_ = null; if (valueBuilder_ != null) { valueBuilder_.dispose(); valueBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.yarn.LocalResourceProto value = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder getValueBuilder() { bitField0_ |= 0x00000002; onChanged(); return getValueFieldBuilder().getBuilder(); } /** * optional .hadoop.yarn.LocalResourceProto value = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder getValueOrBuilder() { if (valueBuilder_ != null) { return valueBuilder_.getMessageOrBuilder(); } else { return value_ == null ? 
org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance() : value_; } } /** * optional .hadoop.yarn.LocalResourceProto value = 2; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder> getValueFieldBuilder() { if (valueBuilder_ == null) { valueBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder>( getValue(), getParentForChildren(), isClean()); value_ = null; } return valueBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.StringLocalResourceMapProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.StringLocalResourceMapProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public StringLocalResourceMapProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface StringStringMapProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.StringStringMapProto) 
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional string key = 1; * @return Whether the key field is set. */ boolean hasKey(); /** * optional string key = 1; * @return The key. */ java.lang.String getKey(); /** * optional string key = 1; * @return The bytes for key. */ org.apache.hadoop.thirdparty.protobuf.ByteString getKeyBytes(); /** * optional string value = 2; * @return Whether the value field is set. */ boolean hasValue(); /** * optional string value = 2; * @return The value. */ java.lang.String getValue(); /** * optional string value = 2; * @return The bytes for value. */ org.apache.hadoop.thirdparty.protobuf.ByteString getValueBytes(); } /** * Protobuf type {@code hadoop.yarn.StringStringMapProto} */ public static final class StringStringMapProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.StringStringMapProto) StringStringMapProtoOrBuilder { private static final long serialVersionUID = 0L; // Use StringStringMapProto.newBuilder() to construct. private StringStringMapProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private StringStringMapProto() { key_ = ""; value_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new StringStringMapProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringStringMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringStringMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder.class); } private int bitField0_; public static final int KEY_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object key_ = ""; /** * optional string key = 1; * @return Whether the key field is set. */ @java.lang.Override public boolean hasKey() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string key = 1; * @return The key. */ @java.lang.Override public java.lang.String getKey() { java.lang.Object ref = key_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { key_ = s; } return s; } } /** * optional string key = 1; * @return The bytes for key. 
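   * <p>Note, added commentary rather than protoc output: like
   * StringLocalResourceMapProto above, this message stands in for one entry of
   * a string-to-string map, e.g. a single environment variable. The sketch
   * assumes the usual generated builder surface (newBuilder, setKey, setValue,
   * build), which this listing truncates before reaching:
   * <pre>{@code
   * StringStringMapProto envVar =
   *     StringStringMapProto.newBuilder()
   *         .setKey("JAVA_HOME")            // optional string key = 1
   *         .setValue("/usr/lib/jvm/java")  // optional string value = 2
   *         .build();
   * }</pre>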
 */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getKeyBytes() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b =
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        key_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int VALUE_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object value_ = "";
    /**
     * optional string value = 2;
     * @return Whether the value field is set.
     */
    @java.lang.Override
    public boolean hasValue() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * optional string value = 2;
     * @return The value.
     */
    @java.lang.Override
    public java.lang.String getValue() {
      java.lang.Object ref = value_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs =
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          value_ = s;
        }
        return s;
      }
    }
    /**
     * optional string value = 2;
     * @return The bytes for value.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getValueBytes() {
      java.lang.Object ref = value_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b =
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        value_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, key_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, value_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, key_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, value_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto other = (org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto) obj;

      if (hasKey() != other.hasKey()) return false;
      if (hasKey()) {
        if (!getKey()
            .equals(other.getKey())) return false;
      }
      if (hasValue() != other.hasValue()) return false;
      if (hasValue()) {
        if (!getValue()
            .equals(other.getValue())) return false;
      }
      if
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasKey()) { hash = (37 * hash) + KEY_FIELD_NUMBER; hash = (53 * hash) + getKey().hashCode(); } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
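      /*
       * parseDelimitedFrom reads one varint-length-prefixed message from the stream,
       * the framing produced by MessageLite.writeDelimitedTo, so several messages can
       * share a single InputStream. A minimal round-trip sketch (writeDelimitedTo is
       * assumed from the standard protobuf runtime; stream setup elided):
       *
       *   entry.writeDelimitedTo(out);  // length-prefixed write
       *   org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto back =
       *       org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.parseDelimitedFrom(in);
       */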
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.StringStringMapProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.StringStringMapProto) org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringStringMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringStringMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; key_ = ""; value_ = ""; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringStringMapProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto build() { org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto 
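      /*
       * build() is buildPartial() plus an isInitialized() check; since this message
       * declares no required fields, isInitialized() is unconditionally true and the
       * check below cannot throw. Typical construction sketch:
       *
       *   org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto entry =
       *       org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.newBuilder()
       *           .setKey("queue")
       *           .setValue("default")
       *           .build();
       */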
      result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto buildPartial() {
      org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }

    private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.key_ = key_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.value_ = value_;
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto) {
        return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto other) {
      if (other == org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance()) return this;
      if (other.hasKey()) {
        key_ = other.key_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasValue()) {
        value_ = other.value_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              key_ = input.readBytes();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
            case 18: {
              value_ = input.readBytes();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object key_ = ""; /** * optional string key = 1; * @return Whether the key field is set. */ public boolean hasKey() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string key = 1; * @return The key. */ public java.lang.String getKey() { java.lang.Object ref = key_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { key_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string key = 1; * @return The bytes for key. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getKeyBytes() { java.lang.Object ref = key_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); key_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string key = 1; * @param value The key to set. * @return This builder for chaining. */ public Builder setKey( java.lang.String value) { if (value == null) { throw new NullPointerException(); } key_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional string key = 1; * @return This builder for chaining. */ public Builder clearKey() { key_ = getDefaultInstance().getKey(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * optional string key = 1; * @param value The bytes for key to set. * @return This builder for chaining. */ public Builder setKeyBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } key_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object value_ = ""; /** * optional string value = 2; * @return Whether the value field is set. */ public boolean hasValue() { return ((bitField0_ & 0x00000002) != 0); } /** * optional string value = 2; * @return The value. */ public java.lang.String getValue() { java.lang.Object ref = value_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { value_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string value = 2; * @return The bytes for value. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getValueBytes() { java.lang.Object ref = value_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); value_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string value = 2; * @param value The value to set. * @return This builder for chaining. 
*/ public Builder setValue( java.lang.String value) { if (value == null) { throw new NullPointerException(); } value_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional string value = 2; * @return This builder for chaining. */ public Builder clearValue() { value_ = getDefaultInstance().getValue(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * optional string value = 2; * @param value The bytes for value to set. * @return This builder for chaining. */ public Builder setValueBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } value_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.StringStringMapProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.StringStringMapProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public StringStringMapProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface StringBytesMapProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.StringBytesMapProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional string key = 1; * @return Whether the key field is set. */ boolean hasKey(); /** * optional string key = 1; * @return The key. 
*/ java.lang.String getKey(); /** * optional string key = 1; * @return The bytes for key. */ org.apache.hadoop.thirdparty.protobuf.ByteString getKeyBytes(); /** * optional bytes value = 2; * @return Whether the value field is set. */ boolean hasValue(); /** * optional bytes value = 2; * @return The value. */ org.apache.hadoop.thirdparty.protobuf.ByteString getValue(); } /** * Protobuf type {@code hadoop.yarn.StringBytesMapProto} */ public static final class StringBytesMapProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.StringBytesMapProto) StringBytesMapProtoOrBuilder { private static final long serialVersionUID = 0L; // Use StringBytesMapProto.newBuilder() to construct. private StringBytesMapProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private StringBytesMapProto() { key_ = ""; value_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new StringBytesMapProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringBytesMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringBytesMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder.class); } private int bitField0_; public static final int KEY_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object key_ = ""; /** * optional string key = 1; * @return Whether the key field is set. */ @java.lang.Override public boolean hasKey() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string key = 1; * @return The key. */ @java.lang.Override public java.lang.String getKey() { java.lang.Object ref = key_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { key_ = s; } return s; } } /** * optional string key = 1; * @return The bytes for key. 
*/ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getKeyBytes() { java.lang.Object ref = key_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); key_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int VALUE_FIELD_NUMBER = 2; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY; /** * optional bytes value = 2; * @return Whether the value field is set. */ @java.lang.Override public boolean hasValue() { return ((bitField0_ & 0x00000002) != 0); } /** * optional bytes value = 2; * @return The value. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getValue() { return value_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, key_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeBytes(2, value_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, key_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeBytesSize(2, value_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto) obj; if (hasKey() != other.hasKey()) return false; if (hasKey()) { if (!getKey() .equals(other.getKey())) return false; } if (hasValue() != other.hasValue()) return false; if (hasValue()) { if (!getValue() .equals(other.getValue())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasKey()) { hash = (37 * hash) + KEY_FIELD_NUMBER; hash = (53 * hash) + getKey().hashCode(); } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom( java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public 
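    /*
     * The parseFrom overloads in this block differ only in input source (ByteBuffer,
     * ByteString, byte[], InputStream, CodedInputStream), each with a variant taking
     * an ExtensionRegistryLite. A minimal byte[] round trip, assuming the standard
     * toByteArray() inherited from the protobuf Message API:
     *
     *   byte[] wire = entry.toByteArray();
     *   org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto copy =
     *       org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.parseFrom(wire);
     */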
static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, 
extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.StringBytesMapProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.StringBytesMapProto) org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringBytesMapProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringBytesMapProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.newBuilder() private Builder() { } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; key_ = ""; value_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY; return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringBytesMapProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto build() { org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.key_ = key_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 
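      /* presence mask: 0x00000001 marks key (field 1) as set and 0x00000002 marks
         value (field 2); buildPartial0 copies only the fields whose bit is set and
         mirrors the mask into the message's own bitField0_ */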
0x00000002) != 0)) { result.value_ = value_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.getDefaultInstance()) return this; if (other.hasKey()) { key_ = other.key_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasValue()) { setValue(other.getValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { key_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { value_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object key_ = ""; /** * optional string key = 1; * @return Whether the key field is set. */ public boolean hasKey() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string key = 1; * @return The key. 
*/ public java.lang.String getKey() { java.lang.Object ref = key_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { key_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string key = 1; * @return The bytes for key. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getKeyBytes() { java.lang.Object ref = key_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); key_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string key = 1; * @param value The key to set. * @return This builder for chaining. */ public Builder setKey( java.lang.String value) { if (value == null) { throw new NullPointerException(); } key_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional string key = 1; * @return This builder for chaining. */ public Builder clearKey() { key_ = getDefaultInstance().getKey(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * optional string key = 1; * @param value The bytes for key to set. * @return This builder for chaining. */ public Builder setKeyBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } key_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY; /** * optional bytes value = 2; * @return Whether the value field is set. */ @java.lang.Override public boolean hasValue() { return ((bitField0_ & 0x00000002) != 0); } /** * optional bytes value = 2; * @return The value. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getValue() { return value_; } /** * optional bytes value = 2; * @param value The value to set. * @return This builder for chaining. */ public Builder setValue(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } value_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional bytes value = 2; * @return This builder for chaining. 
*/ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000002); value_ = getDefaultInstance().getValue(); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.StringBytesMapProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.StringBytesMapProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public StringBytesMapProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface CollectorInfoProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.yarn.CollectorInfoProto) org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder { /** * optional string collector_addr = 1; * @return Whether the collectorAddr field is set. */ boolean hasCollectorAddr(); /** * optional string collector_addr = 1; * @return The collectorAddr. */ java.lang.String getCollectorAddr(); /** * optional string collector_addr = 1; * @return The bytes for collectorAddr. */ org.apache.hadoop.thirdparty.protobuf.ByteString getCollectorAddrBytes(); /** * optional .hadoop.common.TokenProto collector_token = 2; * @return Whether the collectorToken field is set. */ boolean hasCollectorToken(); /** * optional .hadoop.common.TokenProto collector_token = 2; * @return The collectorToken. 
*/ org.apache.hadoop.security.proto.SecurityProtos.TokenProto getCollectorToken(); /** * optional .hadoop.common.TokenProto collector_token = 2; */ org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getCollectorTokenOrBuilder(); } /** * Protobuf type {@code hadoop.yarn.CollectorInfoProto} */ public static final class CollectorInfoProto extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hadoop.yarn.CollectorInfoProto) CollectorInfoProtoOrBuilder { private static final long serialVersionUID = 0L; // Use CollectorInfoProto.newBuilder() to construct. private CollectorInfoProto(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private CollectorInfoProto() { collectorAddr_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CollectorInfoProto(); } @java.lang.Override public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CollectorInfoProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CollectorInfoProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.Builder.class); } private int bitField0_; public static final int COLLECTOR_ADDR_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object collectorAddr_ = ""; /** * optional string collector_addr = 1; * @return Whether the collectorAddr field is set. */ @java.lang.Override public boolean hasCollectorAddr() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string collector_addr = 1; * @return The collectorAddr. */ @java.lang.Override public java.lang.String getCollectorAddr() { java.lang.Object ref = collectorAddr_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { collectorAddr_ = s; } return s; } } /** * optional string collector_addr = 1; * @return The bytes for collectorAddr. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getCollectorAddrBytes() { java.lang.Object ref = collectorAddr_; if (ref instanceof java.lang.String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); collectorAddr_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } public static final int COLLECTOR_TOKEN_FIELD_NUMBER = 2; private org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto collectorToken_; /** * optional .hadoop.common.TokenProto collector_token = 2; * @return Whether the collectorToken field is set. 
*/ @java.lang.Override public boolean hasCollectorToken() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.common.TokenProto collector_token = 2; * @return The collectorToken. */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto getCollectorToken() { return collectorToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : collectorToken_; } /** * optional .hadoop.common.TokenProto collector_token = 2; */ @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getCollectorTokenOrBuilder() { return collectorToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : collectorToken_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasCollectorToken()) { if (!getCollectorToken().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, collectorAddr_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getCollectorToken()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, collectorAddr_); } if (((bitField0_ & 0x00000002) != 0)) { size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream .computeMessageSize(2, getCollectorToken()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto)) { return super.equals(obj); } org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto) obj; if (hasCollectorAddr() != other.hasCollectorAddr()) return false; if (hasCollectorAddr()) { if (!getCollectorAddr() .equals(other.getCollectorAddr())) return false; } if (hasCollectorToken() != other.hasCollectorToken()) return false; if (hasCollectorToken()) { if (!getCollectorToken() .equals(other.getCollectorToken())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasCollectorAddr()) { hash = (37 * hash) + COLLECTOR_ADDR_FIELD_NUMBER; hash = (53 * hash) + getCollectorAddr().hashCode(); } if (hasCollectorToken()) { hash = (37 * hash) + COLLECTOR_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getCollectorToken().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom( 
java.nio.ByteBuffer data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom( java.nio.ByteBuffer data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.ByteString data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom(byte[] data) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom( byte[] data, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) 
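    /* the ExtensionRegistryLite argument controls eager decoding of proto2
       extensions; CollectorInfoProto declares none, so callers can normally use the
       single-argument overloads (or pass an empty registry) */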
throws java.io.IOException { return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.yarn.CollectorInfoProto} */ public static final class Builder extends org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:hadoop.yarn.CollectorInfoProto) org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProtoOrBuilder { public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CollectorInfoProto_descriptor; } @java.lang.Override protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CollectorInfoProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.Builder.class); } // Construct using org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getCollectorTokenFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; collectorAddr_ = ""; collectorToken_ = null; if (collectorTokenBuilder_ != null) { collectorTokenBuilder_.dispose(); collectorTokenBuilder_ = null; } return this; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CollectorInfoProto_descriptor; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto getDefaultInstanceForType() { return org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.getDefaultInstance(); } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto build() { org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto buildPartial() { org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto result = 
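      /* buildPartial snapshots the builder into a fresh message: the message
         constructor invoked below copies little besides unknown fields, and
         buildPartial0 then transfers collectorAddr_/collectorToken_ for whichever
         presence bits are set */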
new org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.collectorAddr_ = collectorAddr_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.collectorToken_ = collectorTokenBuilder_ == null ? collectorToken_ : collectorTokenBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) { if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto) { return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto other) { if (other == org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.getDefaultInstance()) return this; if (other.hasCollectorAddr()) { collectorAddr_ = other.collectorAddr_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasCollectorToken()) { mergeCollectorToken(other.getCollectorToken()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (hasCollectorToken()) { if (!getCollectorToken().isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { collectorAddr_ = input.readBytes(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getCollectorTokenFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch 
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object collectorAddr_ = ""; /** * optional string collector_addr = 1; * @return Whether the collectorAddr field is set. */ public boolean hasCollectorAddr() { return ((bitField0_ & 0x00000001) != 0); } /** * optional string collector_addr = 1; * @return The collectorAddr. */ public java.lang.String getCollectorAddr() { java.lang.Object ref = collectorAddr_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.thirdparty.protobuf.ByteString bs = (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { collectorAddr_ = s; } return s; } else { return (java.lang.String) ref; } } /** * optional string collector_addr = 1; * @return The bytes for collectorAddr. */ public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString getCollectorAddrBytes() { java.lang.Object ref = collectorAddr_; if (ref instanceof String) { org.apache.hadoop.thirdparty.protobuf.ByteString b = org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); collectorAddr_ = b; return b; } else { return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref; } } /** * optional string collector_addr = 1; * @param value The collectorAddr to set. * @return This builder for chaining. */ public Builder setCollectorAddr( java.lang.String value) { if (value == null) { throw new NullPointerException(); } collectorAddr_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * optional string collector_addr = 1; * @return This builder for chaining. */ public Builder clearCollectorAddr() { collectorAddr_ = getDefaultInstance().getCollectorAddr(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * optional string collector_addr = 1; * @param value The bytes for collectorAddr to set. * @return This builder for chaining. */ public Builder setCollectorAddrBytes( org.apache.hadoop.thirdparty.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } collectorAddr_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto collectorToken_; private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> collectorTokenBuilder_; /** * optional .hadoop.common.TokenProto collector_token = 2; * @return Whether the collectorToken field is set. */ public boolean hasCollectorToken() { return ((bitField0_ & 0x00000002) != 0); } /** * optional .hadoop.common.TokenProto collector_token = 2; * @return The collectorToken. */ public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto getCollectorToken() { if (collectorTokenBuilder_ == null) { return collectorToken_ == null ? 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : collectorToken_; } else { return collectorTokenBuilder_.getMessage(); } } /** * optional .hadoop.common.TokenProto collector_token = 2; */ public Builder setCollectorToken(org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (collectorTokenBuilder_ == null) { if (value == null) { throw new NullPointerException(); } collectorToken_ = value; } else { collectorTokenBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.common.TokenProto collector_token = 2; */ public Builder setCollectorToken( org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) { if (collectorTokenBuilder_ == null) { collectorToken_ = builderForValue.build(); } else { collectorTokenBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.common.TokenProto collector_token = 2; */ public Builder mergeCollectorToken(org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) { if (collectorTokenBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && collectorToken_ != null && collectorToken_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) { getCollectorTokenBuilder().mergeFrom(value); } else { collectorToken_ = value; } } else { collectorTokenBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * optional .hadoop.common.TokenProto collector_token = 2; */ public Builder clearCollectorToken() { bitField0_ = (bitField0_ & ~0x00000002); collectorToken_ = null; if (collectorTokenBuilder_ != null) { collectorTokenBuilder_.dispose(); collectorTokenBuilder_ = null; } onChanged(); return this; } /** * optional .hadoop.common.TokenProto collector_token = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getCollectorTokenBuilder() { bitField0_ |= 0x00000002; onChanged(); return getCollectorTokenFieldBuilder().getBuilder(); } /** * optional .hadoop.common.TokenProto collector_token = 2; */ public org.apache.hadoop.shaded.org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getCollectorTokenOrBuilder() { if (collectorTokenBuilder_ != null) { return collectorTokenBuilder_.getMessageOrBuilder(); } else { return collectorToken_ == null ? 
org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : collectorToken_; } } /** * optional .hadoop.common.TokenProto collector_token = 2; */ private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> getCollectorTokenFieldBuilder() { if (collectorTokenBuilder_ == null) { collectorTokenBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3< org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>( getCollectorToken(), getParentForChildren(), isClean()); collectorToken_ = null; } return collectorTokenBuilder_; } @java.lang.Override public final Builder setUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hadoop.yarn.CollectorInfoProto) } // @@protoc_insertion_point(class_scope:hadoop.yarn.CollectorInfoProto) private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto(); } public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser() { @java.lang.Override public CollectorInfoProto parsePartialFrom( org.apache.hadoop.thirdparty.protobuf.CodedInputStream input, org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor 
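  // -------------------------------------------------------------------------
  // Editor's usage sketch (not emitted by protoc): a minimal round trip
  // through the generated CollectorInfoProto API above. The method name and
  // the collector address are hypothetical illustration values; the optional
  // collector_token (a nested hadoop.common.TokenProto with its own field
  // builder) is simply left unset here.
  private static CollectorInfoProto exampleCollectorInfoRoundTrip()
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    CollectorInfoProto info = CollectorInfoProto.newBuilder()
        .setCollectorAddr("timeline.example.com:8188") // hypothetical address
        .build();
    byte[] wire = info.toByteArray();                  // serialize to wire format
    CollectorInfoProto parsed = CollectorInfoProto.parseFrom(wire);
    assert parsed.hasCollectorAddr();                  // the optional field survives the round trip
    return parsed;
  }
  // -------------------------------------------------------------------------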
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_SerializedExceptionProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_SerializedExceptionProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ApplicationIdProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ApplicationIdProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ApplicationAttemptIdProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ApplicationAttemptIdProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ContainerIdProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ContainerIdProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ResourceInformationProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ResourceInformationProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ResourceTypeInfoProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ResourceTypeInfoProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ResourceProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ResourceProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ResourceUtilizationProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ResourceUtilizationProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ResourceOptionProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ResourceOptionProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ResourceProfileEntry_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ResourceProfileEntry_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ResourceProfilesProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ResourceProfilesProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_NodeResourceMapProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_NodeResourceMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_PriorityProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_PriorityProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ContainerProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ContainerProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ContainerReportProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ContainerReportProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_URLProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_URLProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_LocalResourceProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_LocalResourceProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_StringLongMapProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_StringLongMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_StringFloatMapProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_StringFloatMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ApplicationReportProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ApplicationReportProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_AppTimeoutsMapProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_AppTimeoutsMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ApplicationTimeoutProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ApplicationTimeoutProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ApplicationAttemptReportProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ApplicationAttemptReportProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_NodeIdProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_NodeIdProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_NodeReportProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_NodeReportProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_NodeIdToLabelsProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_NodeIdToLabelsProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_LabelsToNodeIdsProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_LabelsToNodeIdsProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_NodeLabelProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_NodeLabelProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_NodeAttributeKeyProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_NodeAttributeKeyProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_NodeAttributeProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_NodeAttributeProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_NodeAttributeInfoProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_NodeAttributeInfoProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_NodeToAttributeValueProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_NodeToAttributeValueProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_AttributeToNodesProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_AttributeToNodesProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_NodeToAttributesProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_NodeToAttributesProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_DeregisterSubClustersProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_DeregisterSubClustersProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_FederationQueueWeightProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_FederationQueueWeightProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_FederationSubClusterProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_FederationSubClusterProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ResourceRequestProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ResourceRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ExecutionTypeRequestProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ExecutionTypeRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_SchedulingRequestProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_SchedulingRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ResourceSizingProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ResourceSizingProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_RejectedSchedulingRequestProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_RejectedSchedulingRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_PreemptionMessageProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_PreemptionMessageProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_StrictPreemptionContractProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_StrictPreemptionContractProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_PreemptionContractProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_PreemptionContractProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_PreemptionContainerProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_PreemptionContainerProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_PreemptionResourceRequestProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_PreemptionResourceRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ResourceBlacklistRequestProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ResourceBlacklistRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ApplicationSubmissionContextProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ApplicationSubmissionContextProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ApplicationTimeoutMapProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ApplicationTimeoutMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_LogAggregationContextProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_LogAggregationContextProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ApplicationACLMapProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ApplicationACLMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_YarnClusterMetricsProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_YarnClusterMetricsProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_QueueStatisticsProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_QueueStatisticsProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_QueueInfoProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_QueueInfoProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_QueueConfigurationsProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_QueueConfigurationsProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_QueueConfigurationsMapProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_QueueConfigurationsMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_QueueUserACLInfoProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_QueueUserACLInfoProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_PlacementConstraintProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_PlacementConstraintProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_SimplePlacementConstraintProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_SimplePlacementConstraintProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_PlacementConstraintTargetProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_PlacementConstraintTargetProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_TimedPlacementConstraintProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_TimedPlacementConstraintProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_CompositePlacementConstraintProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_CompositePlacementConstraintProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ReservationIdProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ReservationIdProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ReservationRequestProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ReservationRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ReservationRequestsProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ReservationRequestsProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ReservationDefinitionProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ReservationDefinitionProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ResourceAllocationRequestProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ResourceAllocationRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ReservationAllocationStateProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ReservationAllocationStateProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ContainerLaunchContextProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ContainerLaunchContextProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ContainerStatusProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ContainerStatusProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_ContainerRetryContextProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_ContainerRetryContextProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_StringLocalResourceMapProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_StringLocalResourceMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_StringStringMapProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_StringStringMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_StringBytesMapProto_descriptor;
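  // -------------------------------------------------------------------------
  // Editor's reflection sketch (not emitted by protoc): the descriptor and
  // field-accessor-table fields declared around this point back the
  // reflective API. exampleDescriptorLookup is a hypothetical name added for
  // illustration; it resolves the collector_addr field (field number 1) of
  // hadoop.yarn.CollectorInfoProto by name.
  private static void exampleDescriptorLookup() {
    org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor type =
        CollectorInfoProto.getDescriptor();
    org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field =
        type.findFieldByName("collector_addr");
    assert field.getNumber() == 1;                   // matches the .proto definition
  }
  // -------------------------------------------------------------------------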
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_StringBytesMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor internal_static_hadoop_yarn_CollectorInfoProto_descriptor;
  private static final org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hadoop_yarn_CollectorInfoProto_fieldAccessorTable;

  public static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\021yarn_protos.proto\022\013hadoop.yarn\032\016Securi" +
      "ty.proto\"\204\001\n\030SerializedExceptionProto\022\017\n" +
      "\007message\030\001 \001(\t\022\r\n\005trace\030\002 \001(\t\022\022\n\nclass_n" +
      "ame\030\003 \001(\t\0224\n\005cause\030\004 \001(\0132%.hadoop.yarn.S" +
      "erializedExceptionProto\";\n\022ApplicationId" +
      "Proto\022\n\n\002id\030\001 \001(\005\022\031\n\021cluster_timestamp\030\002" +
      " \001(\003\"g\n\031ApplicationAttemptIdProto\0227\n\016app" +
      "lication_id\030\001 \001(\0132\037.hadoop.yarn.Applicat" +
      "ionIdProto\022\021\n\tattemptId\030\002 \001(\005\"\217\001\n\020Contai" +
      "nerIdProto\022/\n\006app_id\030\001 \001(\0132\037.hadoop.yarn" +
      ".ApplicationIdProto\022>\n\016app_attempt_id\030\002 " +
      "\001(\0132&.hadoop.yarn.ApplicationAttemptIdPr" +
      "oto\022\n\n\002id\030\003 \001(\003\"\271\001\n\030ResourceInformationP" +
      "roto\022\013\n\003key\030\001 \002(\t\022\r\n\005value\030\002 \001(\003\022\r\n\005unit" +
      "s\030\003 \001(\t\022-\n\004type\030\004 \001(\0162\037.hadoop.yarn.Reso" +
      "urceTypesProto\022\014\n\004tags\030\005 \003(\t\0225\n\nattribut" +
      "es\030\006 \003(\0132!.hadoop.yarn.StringStringMapPr" +
      "oto\"c\n\025ResourceTypeInfoProto\022\014\n\004name\030\001 \002" +
      "(\t\022\r\n\005units\030\002 \001(\t\022-\n\004type\030\003 \001(\0162\037.hadoop" +
      ".yarn.ResourceTypesProto\"y\n\rResourceProt" +
      "o\022\016\n\006memory\030\001 \001(\003\022\025\n\rvirtual_cores\030\002 \001(\005" +
      "\022A\n\022resource_value_map\030\003 \003(\0132%.hadoop.ya" +
      "rn.ResourceInformationProto\"~\n\030ResourceU" +
      "tilizationProto\022\014\n\004pmem\030\001 \001(\005\022\014\n\004vmem\030\002 " +
      "\001(\005\022\013\n\003cpu\030\003 \001(\002\0229\n\017customResources\030\004 \003(" +
      "\0132 .hadoop.yarn.StringFloatMapProto\"`\n\023R" +
      "esourceOptionProto\022,\n\010resource\030\001 \001(\0132\032.h" +
      "adoop.yarn.ResourceProto\022\033\n\023over_commit_" +
      "timeout\030\002 \001(\005\"S\n\024ResourceProfileEntry\022\014\n" +
      "\004name\030\001 \002(\t\022-\n\tresources\030\002 \002(\0132\032.hadoop." +
      "yarn.ResourceProto\"Y\n\025ResourceProfilesPr" +
      "oto\022@\n\025resource_profiles_map\030\001 \003(\0132!.had" +
      "oop.yarn.ResourceProfileEntry\"|\n\024NodeRes" +
      "ourceMapProto\022)\n\007node_id\030\001 \001(\0132\030.hadoop."
+ "yarn.NodeIdProto\0229\n\017resource_option\030\002 \001(" + "\0132 .hadoop.yarn.ResourceOptionProto\"!\n\rP" + "riorityProto\022\020\n\010priority\030\001 \001(\005\"\274\003\n\016Conta" + "inerProto\022)\n\002id\030\001 \001(\0132\035.hadoop.yarn.Cont" + "ainerIdProto\022(\n\006nodeId\030\002 \001(\0132\030.hadoop.ya" + "rn.NodeIdProto\022\031\n\021node_http_address\030\003 \001(" + "\t\022,\n\010resource\030\004 \001(\0132\032.hadoop.yarn.Resour" + "ceProto\022,\n\010priority\030\005 \001(\0132\032.hadoop.yarn." + "PriorityProto\0222\n\017container_token\030\006 \001(\0132\031" + ".hadoop.common.TokenProto\022C\n\016execution_t" + "ype\030\007 \001(\0162\037.hadoop.yarn.ExecutionTypePro" + "to:\nGUARANTEED\022!\n\025allocation_request_id\030" + "\010 \001(\003:\002-1\022\022\n\007version\030\t \001(\005:\0010\022\027\n\017allocat" + "ion_tags\030\n \003(\t\022\025\n\rexposed_ports\030\013 \001(\t\"\376\003" + "\n\024ContainerReportProto\0223\n\014container_id\030\001" + " \001(\0132\035.hadoop.yarn.ContainerIdProto\022,\n\010r" + "esource\030\002 \001(\0132\032.hadoop.yarn.ResourceProt" + "o\022)\n\007node_id\030\003 \001(\0132\030.hadoop.yarn.NodeIdP" + "roto\022,\n\010priority\030\004 \001(\0132\032.hadoop.yarn.Pri" + "orityProto\022\025\n\rcreation_time\030\005 \001(\003\022\023\n\013fin" + "ish_time\030\006 \001(\003\022\035\n\020diagnostics_info\030\007 \001(\t" + ":\003N/A\022\017\n\007log_url\030\010 \001(\t\022\035\n\025container_exit" + "_status\030\t \001(\005\0229\n\017container_state\030\n \001(\0162 " + ".hadoop.yarn.ContainerStateProto\022\031\n\021node" + "_http_address\030\013 \001(\t\022B\n\rexecutionType\030\014 \001" + "(\0162\037.hadoop.yarn.ExecutionTypeProto:\nGUA" + "RANTEED\022\025\n\rexposed_ports\030\r \001(\t\"V\n\010URLPro" + "to\022\016\n\006scheme\030\001 \001(\t\022\014\n\004host\030\002 \001(\t\022\014\n\004port" + "\030\003 \001(\005\022\014\n\004file\030\004 \001(\t\022\020\n\010userInfo\030\005 \001(\t\"\215" + "\002\n\022LocalResourceProto\022\'\n\010resource\030\001 \001(\0132" + "\025.hadoop.yarn.URLProto\022\014\n\004size\030\002 \001(\003\022\021\n\t" + "timestamp\030\003 \001(\003\0221\n\004type\030\004 \001(\0162#.hadoop.y" + "arn.LocalResourceTypeProto\022=\n\nvisibility" + "\030\005 \001(\0162).hadoop.yarn.LocalResourceVisibi" + "lityProto\022\017\n\007pattern\030\006 \001(\t\022*\n\"should_be_" + "uploaded_to_shared_cache\030\007 \001(\010\"0\n\022String" + "LongMapProto\022\013\n\003key\030\001 \002(\t\022\r\n\005value\030\002 \002(\003" + "\"1\n\023StringFloatMapProto\022\013\n\003key\030\001 \002(\t\022\r\n\005" + "value\030\002 \002(\002\"\325\004\n#ApplicationResourceUsage" + "ReportProto\022\033\n\023num_used_containers\030\001 \001(\005" + "\022\037\n\027num_reserved_containers\030\002 \001(\005\0222\n\016use" + "d_resources\030\003 \001(\0132\032.hadoop.yarn.Resource" + "Proto\0226\n\022reserved_resources\030\004 \001(\0132\032.hado" + "op.yarn.ResourceProto\0224\n\020needed_resource" + "s\030\005 \001(\0132\032.hadoop.yarn.ResourceProto\022\026\n\016m" + "emory_seconds\030\006 \001(\003\022\025\n\rvcore_seconds\030\007 \001" + "(\003\022\036\n\026queue_usage_percentage\030\010 \001(\002\022 \n\030cl" + "uster_usage_percentage\030\t \001(\002\022 \n\030preempte" + "d_memory_seconds\030\n \001(\003\022\037\n\027preempted_vcor" + "e_seconds\030\013 \001(\003\022G\n\036application_resource_" + "usage_map\030\014 \003(\0132\037.hadoop.yarn.StringLong" + "MapProto\022Q\n(application_preempted_resour" + "ce_usage_map\030\r 
\003(\0132\037.hadoop.yarn.StringL" + "ongMapProto\"\267\010\n\026ApplicationReportProto\0226" + "\n\rapplicationId\030\001 \001(\0132\037.hadoop.yarn.Appl" + "icationIdProto\022\014\n\004user\030\002 \001(\t\022\r\n\005queue\030\003 " + "\001(\t\022\014\n\004name\030\004 \001(\t\022\014\n\004host\030\005 \001(\t\022\020\n\010rpc_p" + "ort\030\006 \001(\005\0225\n\022client_to_am_token\030\007 \001(\0132\031." + "hadoop.common.TokenProto\022F\n\026yarn_applica" + "tion_state\030\010 \001(\0162&.hadoop.yarn.YarnAppli" + "cationStateProto\022\023\n\013trackingUrl\030\t \001(\t\022\030\n" + "\013diagnostics\030\n \001(\t:\003N/A\022\021\n\tstartTime\030\013 \001" + "(\003\022\022\n\nfinishTime\030\014 \001(\003\022J\n\030final_applicat" + "ion_status\030\r \001(\0162(.hadoop.yarn.FinalAppl" + "icationStatusProto\022L\n\022app_resource_Usage" + "\030\016 \001(\01320.hadoop.yarn.ApplicationResource" + "UsageReportProto\022\033\n\023originalTrackingUrl\030" + "\017 \001(\t\022K\n\033currentApplicationAttemptId\030\020 \001" + "(\0132&.hadoop.yarn.ApplicationAttemptIdPro" + "to\022\020\n\010progress\030\021 \001(\002\022\027\n\017applicationType\030" + "\022 \001(\t\022.\n\013am_rm_token\030\023 \001(\0132\031.hadoop.comm" + "on.TokenProto\022\027\n\017applicationTags\030\024 \003(\t\022F" + "\n\026log_aggregation_status\030\025 \001(\0162&.hadoop." + "yarn.LogAggregationStatusProto\022$\n\025unmana" + "ged_application\030\026 \001(\010:\005false\022,\n\010priority" + "\030\027 \001(\0132\032.hadoop.yarn.PriorityProto\022\036\n\026ap" + "pNodeLabelExpression\030\030 \001(\t\022\035\n\025amNodeLabe" + "lExpression\030\031 \001(\t\0225\n\013appTimeouts\030\032 \003(\0132 " + ".hadoop.yarn.AppTimeoutsMapProto\022\022\n\nlaun" + "chTime\030\033 \001(\003\022\022\n\nsubmitTime\030\034 \001(\003\022\023\n\013rmCl" + "usterId\030\035 \001(\t\"\244\001\n\023AppTimeoutsMapProto\022J\n" + "\030application_timeout_type\030\001 \001(\0162(.hadoop" + ".yarn.ApplicationTimeoutTypeProto\022A\n\023app" + "lication_timeout\030\002 \001(\0132$.hadoop.yarn.App" + "licationTimeoutProto\"\222\001\n\027ApplicationTime" + "outProto\022J\n\030application_timeout_type\030\001 \002" + "(\0162(.hadoop.yarn.ApplicationTimeoutTypeP" + "roto\022\023\n\013expire_time\030\002 \001(\t\022\026\n\016remaining_t" + "ime\030\003 \001(\003\"\214\003\n\035ApplicationAttemptReportPr" + "oto\022F\n\026application_attempt_id\030\001 \001(\0132&.ha" + "doop.yarn.ApplicationAttemptIdProto\022\014\n\004h" + "ost\030\002 \001(\t\022\020\n\010rpc_port\030\003 \001(\005\022\024\n\014tracking_" + "url\030\004 \001(\t\022\030\n\013diagnostics\030\005 \001(\t:\003N/A\022U\n\036y" + "arn_application_attempt_state\030\006 \001(\0162-.ha" + "doop.yarn.YarnApplicationAttemptStatePro" + "to\0226\n\017am_container_id\030\007 \001(\0132\035.hadoop.yar" + "n.ContainerIdProto\022\035\n\025original_tracking_" + "url\030\010 \001(\t\022\021\n\tstartTime\030\t \001(\003\022\022\n\nfinishTi" + "me\030\n \001(\003\")\n\013NodeIdProto\022\014\n\004host\030\001 \001(\t\022\014\n" + "\004port\030\002 \001(\005\"\360\004\n\017NodeReportProto\022(\n\006nodeI" + "d\030\001 \001(\0132\030.hadoop.yarn.NodeIdProto\022\023\n\013htt" + "pAddress\030\002 \001(\t\022\020\n\010rackName\030\003 \001(\t\022(\n\004used" + "\030\004 \001(\0132\032.hadoop.yarn.ResourceProto\022.\n\nca" + "pability\030\005 \001(\0132\032.hadoop.yarn.ResourcePro" + "to\022\025\n\rnumContainers\030\006 \001(\005\022/\n\nnode_state\030" + "\007 
\001(\0162\033.hadoop.yarn.NodeStateProto\022\025\n\rhe" + "alth_report\030\010 \001(\t\022\037\n\027last_health_report_" + "time\030\t \001(\003\022\023\n\013node_labels\030\n \003(\t\022E\n\026conta" + "iners_utilization\030\013 \001(\0132%.hadoop.yarn.Re" + "sourceUtilizationProto\022?\n\020node_utilizati" + "on\030\014 \001(\0132%.hadoop.yarn.ResourceUtilizati" + "onProto\022\037\n\027decommissioning_timeout\030\r \001(\r" + "\022:\n\020node_update_type\030\016 \001(\0162 .hadoop.yarn" + ".NodeUpdateTypeProto\0228\n\017node_attributes\030" + "\017 \003(\0132\037.hadoop.yarn.NodeAttributeProto\"S" + "\n\023NodeIdToLabelsProto\022(\n\006nodeId\030\001 \001(\0132\030." + "hadoop.yarn.NodeIdProto\022\022\n\nnodeLabels\030\002 " + "\003(\t\"T\n\024LabelsToNodeIdsProto\022\022\n\nnodeLabel" + "s\030\001 \001(\t\022(\n\006nodeId\030\002 \003(\0132\030.hadoop.yarn.No" + "deIdProto\"9\n\016NodeLabelProto\022\014\n\004name\030\001 \001(" + "\t\022\031\n\013isExclusive\030\002 \001(\010:\004true\"S\n\025NodeAttr" + "ibuteKeyProto\022#\n\017attributePrefix\030\001 \001(\t:\n" + "rm.yarn.io\022\025\n\rattributeName\030\002 \002(\t\"\254\001\n\022No" + "deAttributeProto\0228\n\014attributeKey\030\001 \002(\0132\"" + ".hadoop.yarn.NodeAttributeKeyProto\022B\n\rat" + "tributeType\030\002 \001(\0162#.hadoop.yarn.NodeAttr" + "ibuteTypeProto:\006STRING\022\030\n\016attributeValue" + "\030\003 \001(\t:\000\"\216\001\n\026NodeAttributeInfoProto\0228\n\014a" + "ttributeKey\030\001 \002(\0132\".hadoop.yarn.NodeAttr" + "ibuteKeyProto\022:\n\rattributeType\030\002 \002(\0162#.h" + "adoop.yarn.NodeAttributeTypeProto\"E\n\031Nod" + "eToAttributeValueProto\022\020\n\010hostname\030\001 \002(\t" + "\022\026\n\016attributeValue\030\002 \002(\t\"\220\001\n\025AttributeTo" + "NodesProto\0229\n\rnodeAttribute\030\001 \002(\0132\".hado" + "op.yarn.NodeAttributeKeyProto\022<\n\014nodeVal" + "ueMap\030\002 \003(\0132&.hadoop.yarn.NodeToAttribut" + "eValueProto\"^\n\025NodeToAttributesProto\022\014\n\004" + "node\030\001 \001(\t\0227\n\016nodeAttributes\030\002 \003(\0132\037.had" + "oop.yarn.NodeAttributeProto\"\224\001\n\032Deregist" + "erSubClustersProto\022\024\n\014subClusterId\030\001 \001(\t" + "\022\027\n\017deregisterState\030\002 \001(\t\022\031\n\021lastHeartBe" + "atTime\030\003 \001(\t\022\023\n\013information\030\004 \001(\t\022\027\n\017sub" + "ClusterState\030\005 \001(\t\"\214\001\n\032FederationQueueWe" + "ightProto\022\024\n\014routerWeight\030\001 \001(\t\022\022\n\namrmW" + "eight\030\002 \001(\t\022\025\n\rheadRoomAlpha\030\003 \001(\t\022\r\n\005qu" + "eue\030\004 \001(\t\022\036\n\026policyManagerClassName\030\005 \001(" + "\t\"e\n\031FederationSubClusterProto\022\024\n\014subClu" + "sterId\030\001 \001(\t\022\031\n\021lastHeartBeatTime\030\002 \001(\t\022" + "\027\n\017subClusterState\030\003 \001(\t\"\313\002\n\024ResourceReq" + "uestProto\022,\n\010priority\030\001 \001(\0132\032.hadoop.yar" + "n.PriorityProto\022\025\n\rresource_name\030\002 \001(\t\022." 
+ "\n\ncapability\030\003 \001(\0132\032.hadoop.yarn.Resourc" + "eProto\022\026\n\016num_containers\030\004 \001(\005\022\034\n\016relax_" + "locality\030\005 \001(\010:\004true\022\035\n\025node_label_expre" + "ssion\030\006 \001(\t\022F\n\026execution_type_request\030\007 " + "\001(\0132&.hadoop.yarn.ExecutionTypeRequestPr" + "oto\022!\n\025allocation_request_id\030\010 \001(\003:\002-1\"\207" + "\001\n\031ExecutionTypeRequestProto\022C\n\016executio" + "n_type\030\001 \001(\0162\037.hadoop.yarn.ExecutionType" + "Proto:\nGUARANTEED\022%\n\026enforce_execution_t" + "ype\030\002 \001(\010:\005false\"\273\002\n\026SchedulingRequestPr" + "oto\022\036\n\023allocationRequestId\030\001 \001(\003:\0010\022,\n\010p" + "riority\030\002 \001(\0132\032.hadoop.yarn.PriorityProt" + "o\022=\n\rexecutionType\030\003 \001(\0132&.hadoop.yarn.E" + "xecutionTypeRequestProto\022\026\n\016allocationTa" + "gs\030\004 \003(\t\0228\n\016resourceSizing\030\005 \001(\0132 .hadoo" + "p.yarn.ResourceSizingProto\022B\n\023placementC" + "onstraint\030\006 \001(\0132%.hadoop.yarn.PlacementC" + "onstraintProto\"\\\n\023ResourceSizingProto\022\026\n" + "\016numAllocations\030\001 \001(\005\022-\n\tresources\030\002 \001(\013" + "2\032.hadoop.yarn.ResourceProto\"\211\001\n\036Rejecte" + "dSchedulingRequestProto\0221\n\006reason\030\001 \002(\0162" + "!.hadoop.yarn.RejectionReasonProto\0224\n\007re" + "quest\030\002 \002(\0132#.hadoop.yarn.SchedulingRequ" + "estProto\"\224\001\n\026PreemptionMessageProto\022B\n\016s" + "trictContract\030\001 \001(\0132*.hadoop.yarn.Strict" + "PreemptionContractProto\0226\n\010contract\030\002 \001(" + "\0132$.hadoop.yarn.PreemptionContractProto\"" + "Y\n\035StrictPreemptionContractProto\0228\n\tcont" + "ainer\030\001 \003(\0132%.hadoop.yarn.PreemptionCont" + "ainerProto\"\222\001\n\027PreemptionContractProto\022=" + "\n\010resource\030\001 \003(\0132+.hadoop.yarn.Preemptio" + "nResourceRequestProto\0228\n\tcontainer\030\002 \003(\013" + "2%.hadoop.yarn.PreemptionContainerProto\"" + "E\n\030PreemptionContainerProto\022)\n\002id\030\001 \001(\0132" + "\035.hadoop.yarn.ContainerIdProto\"U\n\036Preemp" + "tionResourceRequestProto\0223\n\010resource\030\001 \001" + "(\0132!.hadoop.yarn.ResourceRequestProto\"X\n" + "\035ResourceBlacklistRequestProto\022\033\n\023blackl" + "ist_additions\030\001 \003(\t\022\032\n\022blacklist_removal" + "s\030\002 \003(\t\"\274\007\n!ApplicationSubmissionContext" + "Proto\0227\n\016application_id\030\001 \001(\0132\037.hadoop.y" + "arn.ApplicationIdProto\022\035\n\020application_na" + "me\030\002 \001(\t:\003N/A\022\026\n\005queue\030\003 \001(\t:\007default\022,\n" + "\010priority\030\004 \001(\0132\032.hadoop.yarn.PriorityPr" + "oto\022C\n\021am_container_spec\030\005 \001(\0132(.hadoop." 
+ "yarn.ContainerLaunchContextProto\022)\n\033canc" + "el_tokens_when_complete\030\006 \001(\010:\004true\022\033\n\014u" + "nmanaged_am\030\007 \001(\010:\005false\022\031\n\016maxAppAttemp" + "ts\030\010 \001(\005:\0010\022,\n\010resource\030\t \001(\0132\032.hadoop.y" + "arn.ResourceProto\022\035\n\017applicationType\030\n \001" + "(\t:\004YARN\022:\n+keep_containers_across_appli" + "cation_attempts\030\013 \001(\010:\005false\022\027\n\017applicat" + "ionTags\030\014 \003(\t\022.\n\"attempt_failures_validi" + "ty_interval\030\r \001(\003:\002-1\022H\n\027log_aggregation" + "_context\030\016 \001(\0132\'.hadoop.yarn.LogAggregat" + "ionContextProto\0227\n\016reservation_id\030\017 \001(\0132" + "\037.hadoop.yarn.ReservationIdProto\022\035\n\025node" + "_label_expression\030\020 \001(\t\022H\n\035am_container_" + "resource_request\030\021 \003(\0132!.hadoop.yarn.Res" + "ourceRequestProto\022E\n\024application_timeout" + "s\030\022 \003(\0132\'.hadoop.yarn.ApplicationTimeout" + "MapProto\022L\n!application_scheduling_prope" + "rties\030\023 \003(\0132!.hadoop.yarn.StringStringMa" + "pProto\"y\n\032ApplicationTimeoutMapProto\022J\n\030" + "application_timeout_type\030\001 \001(\0162(.hadoop." + "yarn.ApplicationTimeoutTypeProto\022\017\n\007time" + "out\030\002 \001(\003\"\203\001\n ApplicationUpdateTimeoutMa" + "pProto\022J\n\030application_timeout_type\030\001 \001(\016" + "2(.hadoop.yarn.ApplicationTimeoutTypePro" + "to\022\023\n\013expire_time\030\002 \001(\t\"\372\001\n\032LogAggregati" + "onContextProto\022\033\n\017include_pattern\030\001 \001(\t:" + "\002.*\022\031\n\017exclude_pattern\030\002 \001(\t:\000\022%\n\033rolled" + "_logs_include_pattern\030\003 \001(\t:\000\022\'\n\033rolled_" + "logs_exclude_pattern\030\004 \001(\t:\002.*\022)\n!log_ag" + "gregation_policy_class_name\030\005 \001(\t\022)\n!log" + "_aggregation_policy_parameters\030\006 \001(\t\"e\n\026" + "ApplicationACLMapProto\022;\n\naccessType\030\001 \001" + "(\0162\'.hadoop.yarn.ApplicationAccessTypePr" + "oto\022\016\n\003acl\030\002 \001(\t:\001 \"\362\001\n\027YarnClusterMetri" + "csProto\022\031\n\021num_node_managers\030\001 \001(\005\022\036\n\026nu" + "m_decommissioned_nms\030\002 \001(\005\022\026\n\016num_active" + "_nms\030\003 \001(\005\022\024\n\014num_lost_nms\030\004 \001(\005\022\031\n\021num_" + "unhealthy_nms\030\005 \001(\005\022\030\n\020num_rebooted_nms\030" + "\006 \001(\005\022\037\n\027num_decommissioning_nms\030\007 \001(\005\022\030" + "\n\020num_shutdown_nms\030\010 \001(\005\"\336\003\n\024QueueStatis" + "ticsProto\022\030\n\020numAppsSubmitted\030\001 \001(\003\022\026\n\016n" + "umAppsRunning\030\002 \001(\003\022\026\n\016numAppsPending\030\003 " + "\001(\003\022\030\n\020numAppsCompleted\030\004 \001(\003\022\025\n\rnumApps" + "Killed\030\005 \001(\003\022\025\n\rnumAppsFailed\030\006 \001(\003\022\026\n\016n" + "umActiveUsers\030\007 \001(\003\022\031\n\021availableMemoryMB" + "\030\010 \001(\003\022\031\n\021allocatedMemoryMB\030\t \001(\003\022\027\n\017pen" + "dingMemoryMB\030\n \001(\003\022\030\n\020reservedMemoryMB\030\013" + " \001(\003\022\027\n\017availableVCores\030\014 \001(\003\022\027\n\017allocat" + "edVCores\030\r \001(\003\022\025\n\rpendingVCores\030\016 \001(\003\022\026\n" + "\016reservedVCores\030\017 \001(\003\022\033\n\023allocatedContai" + "ners\030\020 \001(\003\022\031\n\021pendingContainers\030\021 \001(\003\022\032\n" + "\022reservedContainers\030\022 \001(\003\"\361\006\n\016QueueInfoP" + "roto\022\021\n\tqueueName\030\001 
\001(\t\022\020\n\010capacity\030\002 \001(" + "\002\022\027\n\017maximumCapacity\030\003 \001(\002\022\027\n\017currentCap" + "acity\030\004 \001(\002\022+\n\005state\030\005 \001(\0162\034.hadoop.yarn" + ".QueueStateProto\0220\n\013childQueues\030\006 \003(\0132\033." + "hadoop.yarn.QueueInfoProto\0229\n\014applicatio" + "ns\030\007 \003(\0132#.hadoop.yarn.ApplicationReport" + "Proto\022\034\n\024accessibleNodeLabels\030\010 \003(\t\022\"\n\032d" + "efaultNodeLabelExpression\030\t \001(\t\022:\n\017queue" + "Statistics\030\n \001(\0132!.hadoop.yarn.QueueStat" + "isticsProto\022\032\n\022preemptionDisabled\030\013 \001(\010\022" + "H\n\026queueConfigurationsMap\030\014 \003(\0132(.hadoop" + ".yarn.QueueConfigurationsMapProto\022$\n\034int" + "raQueuePreemptionDisabled\030\r \001(\010\022\016\n\006weigh" + "t\030\016 \001(\002\022\021\n\tqueuePath\030\017 \001(\t\022\027\n\017maxParalle" + "lApps\030\020 \001(\005\022\025\n\rschedulerType\030\021 \001(\t\022\030\n\020mi" + "nResourceVCore\030\022 \001(\005\022\031\n\021minResourceMemor" + "y\030\023 \001(\003\022\030\n\020maxResourceVCore\030\024 \001(\005\022\031\n\021max" + "ResourceMemory\030\025 \001(\003\022\035\n\025reservedResource" + "VCore\030\026 \001(\005\022\036\n\026reservedResourceMemory\030\027 " + "\001(\003\022\034\n\024steadyFairShareVCore\030\030 \001(\005\022\035\n\025ste" + "adyFairShareMemory\030\031 \001(\003\022\024\n\014subClusterId" + "\030\032 \001(\t\022\025\n\rmaxRunningApp\030\033 \001(\005\"\373\002\n\030QueueC" + "onfigurationsProto\022\020\n\010capacity\030\001 \001(\002\022\030\n\020" + "absoluteCapacity\030\002 \001(\002\022\023\n\013maxCapacity\030\003 " + "\001(\002\022\033\n\023absoluteMaxCapacity\030\004 \001(\002\022\027\n\017maxA" + "MPercentage\030\005 \001(\002\0228\n\024effectiveMinCapacit" + "y\030\006 \001(\0132\032.hadoop.yarn.ResourceProto\0228\n\024e" + "ffectiveMaxCapacity\030\007 \001(\0132\032.hadoop.yarn." + "ResourceProto\0229\n\025configuredMinCapacity\030\010" + " \001(\0132\032.hadoop.yarn.ResourceProto\0229\n\025conf" + "iguredMaxCapacity\030\t \001(\0132\032.hadoop.yarn.Re" + "sourceProto\"x\n\033QueueConfigurationsMapPro" + "to\022\025\n\rpartitionName\030\001 \002(\t\022B\n\023queueConfig" + "urations\030\002 \001(\0132%.hadoop.yarn.QueueConfig" + "urationsProto\"X\n\025QueueUserACLInfoProto\022\021" + "\n\tqueueName\030\001 \001(\t\022,\n\010userAcls\030\002 \003(\0162\032.ha" + "doop.yarn.QueueACLProto\"\256\001\n\030PlacementCon" + "straintProto\022E\n\020simpleConstraint\030\001 \001(\0132+" + ".hadoop.yarn.SimplePlacementConstraintPr" + "oto\022K\n\023compositeConstraint\030\002 \001(\0132..hadoo" + "p.yarn.CompositePlacementConstraintProto" + "\"\347\001\n\036SimplePlacementConstraintProto\022\r\n\005s" + "cope\030\001 \002(\t\022F\n\021targetExpressions\030\002 \003(\0132+." 
+ "hadoop.yarn.PlacementConstraintTargetPro" + "to\022\026\n\016minCardinality\030\003 \001(\005\022\026\n\016maxCardina" + "lity\030\004 \001(\005\022>\n\017attributeOpCode\030\005 \001(\0162%.ha" + "doop.yarn.NodeAttributeOpCodeProto\"\325\001\n\036P" + "lacementConstraintTargetProto\022J\n\ntargetT" + "ype\030\001 \002(\01626.hadoop.yarn.PlacementConstra" + "intTargetProto.TargetType\022\021\n\ttargetKey\030\002" + " \001(\t\022\024\n\014targetValues\030\003 \003(\t\">\n\nTargetType" + "\022\022\n\016NODE_ATTRIBUTE\020\001\022\022\n\016ALLOCATION_TAG\020\002" + "\022\010\n\004SELF\020\003\"\205\002\n\035TimedPlacementConstraintP" + "roto\022B\n\023placementConstraint\030\001 \002(\0132%.hado" + "op.yarn.PlacementConstraintProto\022\027\n\017sche" + "dulingDelay\030\002 \002(\003\022U\n\tdelayUnit\030\003 \001(\01624.h" + "adoop.yarn.TimedPlacementConstraintProto" + ".DelayUnit:\014MILLISECONDS\"0\n\tDelayUnit\022\020\n" + "\014MILLISECONDS\020\001\022\021\n\rOPPORTUNITIES\020\002\"\266\002\n!C" + "ompositePlacementConstraintProto\022S\n\rcomp" + "ositeType\030\001 \002(\0162<.hadoop.yarn.CompositeP" + "lacementConstraintProto.CompositeType\022?\n" + "\020childConstraints\030\002 \003(\0132%.hadoop.yarn.Pl" + "acementConstraintProto\022I\n\025timedChildCons" + "traints\030\003 \003(\0132*.hadoop.yarn.TimedPlaceme" + "ntConstraintProto\"0\n\rCompositeType\022\007\n\003AN" + "D\020\001\022\006\n\002OR\020\002\022\016\n\nDELAYED_OR\020\003\"\200\001\n Placemen" + "tConstraintMapEntryProto\022\027\n\017allocation_t" + "ags\030\001 \003(\t\022C\n\024placement_constraint\030\002 \001(\0132" + "%.hadoop.yarn.PlacementConstraintProto\";" + "\n\022ReservationIdProto\022\n\n\002id\030\001 \001(\003\022\031\n\021clus" + "ter_timestamp\030\002 \001(\003\"\222\001\n\027ReservationReque" + "stProto\022.\n\ncapability\030\001 \001(\0132\032.hadoop.yar" + "n.ResourceProto\022\031\n\016num_containers\030\002 \001(\005:" + "\0011\022\026\n\013concurrency\030\003 \001(\005:\0011\022\024\n\010duration\030\004" + " \001(\003:\002-1\"\254\001\n\030ReservationRequestsProto\022C\n" + "\025reservation_resources\030\001 \003(\0132$.hadoop.ya" + "rn.ReservationRequestProto\022K\n\013interprete" + "r\030\002 \001(\0162/.hadoop.yarn.ReservationRequest" + "InterpreterProto:\005R_ALL\"\356\001\n\032ReservationD" + "efinitionProto\022C\n\024reservation_requests\030\001" + " \001(\0132%.hadoop.yarn.ReservationRequestsPr" + "oto\022\017\n\007arrival\030\002 \001(\003\022\020\n\010deadline\030\003 \001(\003\022\030" + "\n\020reservation_name\030\004 \001(\t\022 \n\025recurrence_e" + "xpression\030\005 \001(\t:\0010\022,\n\010priority\030\006 \001(\0132\032.h" + "adoop.yarn.PriorityProto\"t\n\036ResourceAllo" + "cationRequestProto\022\022\n\nstart_time\030\001 \001(\003\022\020" + "\n\010end_time\030\002 \001(\003\022,\n\010resource\030\003 \001(\0132\032.had" + "oop.yarn.ResourceProto\"\322\002\n\037ReservationAl" + "locationStateProto\022G\n\026reservation_defini" + "tion\030\001 \001(\0132\'.hadoop.yarn.ReservationDefi" + "nitionProto\022H\n\023allocation_requests\030\002 \003(\013" + "2+.hadoop.yarn.ResourceAllocationRequest" + "Proto\022\022\n\nstart_time\030\003 \001(\003\022\020\n\010end_time\030\004 " + "\001(\003\022\014\n\004user\030\005 \001(\t\022\026\n\016contains_gangs\030\006 \001(" + "\010\022\027\n\017acceptance_time\030\007 \001(\003\0227\n\016reservatio" + "n_id\030\010 \001(\0132\037.hadoop.yarn.ReservationIdPr" + "oto\"\216\003\n\033ContainerLaunchContextProto\022@\n\016l" + "ocalResources\030\001 
\003(\0132(.hadoop.yarn.String" + "LocalResourceMapProto\022\016\n\006tokens\030\002 \001(\014\0226\n" + "\014service_data\030\003 \003(\0132 .hadoop.yarn.String" + "BytesMapProto\0226\n\013environment\030\004 \003(\0132!.had" + "oop.yarn.StringStringMapProto\022\017\n\007command" + "\030\005 \003(\t\022=\n\020application_ACLs\030\006 \003(\0132#.hadoo" + "p.yarn.ApplicationACLMapProto\022H\n\027contain" + "er_retry_context\030\007 \001(\0132\'.hadoop.yarn.Con" + "tainerRetryContextProto\022\023\n\013tokens_conf\030\010" + " \001(\014\"\251\003\n\024ContainerStatusProto\0223\n\014contain" + "er_id\030\001 \001(\0132\035.hadoop.yarn.ContainerIdPro" + "to\022/\n\005state\030\002 \001(\0162 .hadoop.yarn.Containe" + "rStateProto\022\030\n\013diagnostics\030\003 \001(\t:\003N/A\022\032\n" + "\013exit_status\030\004 \001(\005:\005-1000\022.\n\ncapability\030" + "\005 \001(\0132\032.hadoop.yarn.ResourceProto\022B\n\rexe" + "cutionType\030\006 \001(\0162\037.hadoop.yarn.Execution" + "TypeProto:\nGUARANTEED\022?\n\024container_attri" + "butes\030\007 \003(\0132!.hadoop.yarn.StringStringMa" + "pProto\022@\n\023container_sub_state\030\010 \001(\0162#.ha" + "doop.yarn.ContainerSubStateProto\"\327\001\n\032Con", "tainerRetryContextProto\022I\n\014retry_policy\030" + "\001 \001(\0162&.hadoop.yarn.ContainerRetryPolicy" + "Proto:\013NEVER_RETRY\022\023\n\013error_codes\030\002 \003(\005\022" + "\026\n\013max_retries\030\003 \001(\005:\0010\022\031\n\016retry_interva" + "l\030\004 \001(\005:\0010\022&\n\032failures_validity_interval" + "\030\005 \001(\003:\002-1\"Z\n\033StringLocalResourceMapProt" + "o\022\013\n\003key\030\001 \001(\t\022.\n\005value\030\002 \001(\0132\037.hadoop.y" + "arn.LocalResourceProto\"2\n\024StringStringMa" + "pProto\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t\"1\n\023St" + "ringBytesMapProto\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030" + "\002 \001(\014\"`\n\022CollectorInfoProto\022\026\n\016collector" + "_addr\030\001 \001(\t\0222\n\017collector_token\030\002 \001(\0132\031.h" + "adoop.common.TokenProto*#\n\022ResourceTypes" + "Proto\022\r\n\tCOUNTABLE\020\000*?\n\023ContainerStatePr" + "oto\022\t\n\005C_NEW\020\001\022\r\n\tC_RUNNING\020\002\022\016\n\nC_COMPL" + "ETE\020\003*n\n\026ContainerSubStateProto\022\021\n\rCSS_S" + "CHEDULED\020\001\022\017\n\013CSS_RUNNING\020\002\022\016\n\nCSS_PAUSE" + "D\020\003\022\022\n\016CSS_COMPLETING\020\004\022\014\n\010CSS_DONE\020\005*\204\001" + "\n\031YarnApplicationStateProto\022\007\n\003NEW\020\001\022\016\n\n" + "NEW_SAVING\020\002\022\r\n\tSUBMITTED\020\003\022\014\n\010ACCEPTED\020" + "\004\022\013\n\007RUNNING\020\005\022\014\n\010FINISHED\020\006\022\n\n\006FAILED\020\007" + "\022\n\n\006KILLED\020\010*\302\002\n YarnApplicationAttemptS" + "tateProto\022\023\n\017APP_ATTEMPT_NEW\020\001\022\031\n\025APP_AT" + "TEMPT_SUBMITTED\020\002\022\031\n\025APP_ATTEMPT_SCHEDUL" + "ED\020\003\022 \n\034APP_ATTEMPT_ALLOCATED_SAVING\020\004\022\031" + "\n\025APP_ATTEMPT_ALLOCATED\020\005\022\030\n\024APP_ATTEMPT" + "_LAUNCHED\020\006\022\026\n\022APP_ATTEMPT_FAILED\020\007\022\027\n\023A" + "PP_ATTEMPT_RUNNING\020\010\022\031\n\025APP_ATTEMPT_FINI" + "SHING\020\t\022\030\n\024APP_ATTEMPT_FINISHED\020\n\022\026\n\022APP" + "_ATTEMPT_KILLED\020\013*r\n\033FinalApplicationSta" + "tusProto\022\021\n\rAPP_UNDEFINED\020\000\022\021\n\rAPP_SUCCE" + "EDED\020\001\022\016\n\nAPP_FAILED\020\002\022\016\n\nAPP_KILLED\020\003\022\r" + "\n\tAPP_ENDED\020\004*H\n\034LocalResourceVisibility" + 
"Proto\022\n\n\006PUBLIC\020\001\022\013\n\007PRIVATE\020\002\022\017\n\013APPLIC" + "ATION\020\003*<\n\026LocalResourceTypeProto\022\013\n\007ARC" + "HIVE\020\001\022\010\n\004FILE\020\002\022\013\n\007PATTERN\020\003*\244\001\n\031LogAgg" + "regationStatusProto\022\020\n\014LOG_DISABLED\020\001\022\021\n" + "\rLOG_NOT_START\020\002\022\017\n\013LOG_RUNNING\020\003\022\021\n\rLOG" + "_SUCCEEDED\020\004\022\016\n\nLOG_FAILED\020\005\022\020\n\014LOG_TIME" + "_OUT\020\006\022\034\n\030LOG_RUNNING_WITH_FAILURE\020\007*\234\001\n" + "\016NodeStateProto\022\n\n\006NS_NEW\020\001\022\016\n\nNS_RUNNIN" + "G\020\002\022\020\n\014NS_UNHEALTHY\020\003\022\025\n\021NS_DECOMMISSION" + "ED\020\004\022\013\n\007NS_LOST\020\005\022\017\n\013NS_REBOOTED\020\006\022\026\n\022NS" + "_DECOMMISSIONING\020\007\022\017\n\013NS_SHUTDOWN\020\010*S\n\023N" + "odeUpdateTypeProto\022\017\n\013NODE_USABLE\020\000\022\021\n\rN" + "ODE_UNUSABLE\020\001\022\030\n\024NODE_DECOMMISSIONING\020\002" + "*$\n\026NodeAttributeTypeProto\022\n\n\006STRING\020\001*6" + "\n\022ContainerTypeProto\022\026\n\022APPLICATION_MAST" + "ER\020\001\022\010\n\004TASK\020\002*7\n\022ExecutionTypeProto\022\016\n\n" + "GUARANTEED\020\001\022\021\n\rOPPORTUNISTIC\020\002*0\n\016AMCom" + "mandProto\022\r\n\tAM_RESYNC\020\001\022\017\n\013AM_SHUTDOWN\020" + "\002*[\n\024RejectionReasonProto\022\037\n\033RRP_COULD_N" + "OT_PLACE_ON_NODE\020\001\022\"\n\036RRP_COULD_NOT_SCHE" + "DULE_ON_NODE\020\002*7\n\033ApplicationTimeoutType" + "Proto\022\030\n\024APP_TIMEOUT_LIFETIME\020\001*N\n\032Appli" + "cationAccessTypeProto\022\026\n\022APPACCESS_VIEW_" + "APP\020\001\022\030\n\024APPACCESS_MODIFY_APP\020\002*?\n\017Queue" + "StateProto\022\r\n\tQ_STOPPED\020\001\022\r\n\tQ_RUNNING\020\002" + "\022\016\n\nQ_DRAINING\020\003*H\n\rQueueACLProto\022\034\n\030QAC" + "L_SUBMIT_APPLICATIONS\020\001\022\031\n\025QACL_ADMINIST" + "ER_QUEUE\020\002*c\n\033SignalContainerCommandProt" + "o\022\026\n\022OUTPUT_THREAD_DUMP\020\001\022\025\n\021GRACEFUL_SH" + "UTDOWN\020\002\022\025\n\021FORCEFUL_SHUTDOWN\020\003*5\n\030NodeA" + "ttributeOpCodeProto\022\t\n\005NO_OP\020\001\022\006\n\002EQ\020\002\022\006" + "\n\002NE\020\003*[\n\"ReservationRequestInterpreterP" + "roto\022\t\n\005R_ANY\020\000\022\t\n\005R_ALL\020\001\022\013\n\007R_ORDER\020\002\022" + "\022\n\016R_ORDER_NO_GAP\020\003*n\n\030ContainerExitStat" + "usProto\022\013\n\007SUCCESS\020\000\022\024\n\007INVALID\020\230\370\377\377\377\377\377\377" + "\377\001\022\024\n\007ABORTED\020\234\377\377\377\377\377\377\377\377\001\022\031\n\014DISKS_FAILED" + "\020\233\377\377\377\377\377\377\377\377\001*h\n\031ContainerRetryPolicyProto" + "\022\017\n\013NEVER_RETRY\020\000\022\027\n\023RETRY_ON_ALL_ERRORS" + "\020\001\022!\n\035RETRY_ON_SPECIFIC_ERROR_CODES\020\002B0\n" + "\034org.apache.hadoop.yarn.protoB\nYarnProto" + "s\210\001\001\240\001\001" }; descriptor = org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.security.proto.SecurityProtos.getDescriptor(), }); internal_static_hadoop_yarn_SerializedExceptionProto_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_hadoop_yarn_SerializedExceptionProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_SerializedExceptionProto_descriptor, new java.lang.String[] { "Message", "Trace", "ClassName", "Cause", }); 
internal_static_hadoop_yarn_ApplicationIdProto_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_hadoop_yarn_ApplicationIdProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ApplicationIdProto_descriptor, new java.lang.String[] { "Id", "ClusterTimestamp", }); internal_static_hadoop_yarn_ApplicationAttemptIdProto_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_hadoop_yarn_ApplicationAttemptIdProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ApplicationAttemptIdProto_descriptor, new java.lang.String[] { "ApplicationId", "AttemptId", }); internal_static_hadoop_yarn_ContainerIdProto_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_hadoop_yarn_ContainerIdProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ContainerIdProto_descriptor, new java.lang.String[] { "AppId", "AppAttemptId", "Id", }); internal_static_hadoop_yarn_ResourceInformationProto_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_hadoop_yarn_ResourceInformationProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ResourceInformationProto_descriptor, new java.lang.String[] { "Key", "Value", "Units", "Type", "Tags", "Attributes", }); internal_static_hadoop_yarn_ResourceTypeInfoProto_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_hadoop_yarn_ResourceTypeInfoProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ResourceTypeInfoProto_descriptor, new java.lang.String[] { "Name", "Units", "Type", }); internal_static_hadoop_yarn_ResourceProto_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_hadoop_yarn_ResourceProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ResourceProto_descriptor, new java.lang.String[] { "Memory", "VirtualCores", "ResourceValueMap", }); internal_static_hadoop_yarn_ResourceUtilizationProto_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_hadoop_yarn_ResourceUtilizationProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ResourceUtilizationProto_descriptor, new java.lang.String[] { "Pmem", "Vmem", "Cpu", "CustomResources", }); internal_static_hadoop_yarn_ResourceOptionProto_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_hadoop_yarn_ResourceOptionProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ResourceOptionProto_descriptor, new java.lang.String[] { "Resource", "OverCommitTimeout", }); internal_static_hadoop_yarn_ResourceProfileEntry_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_hadoop_yarn_ResourceProfileEntry_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ResourceProfileEntry_descriptor, new java.lang.String[] { "Name", "Resources", }); internal_static_hadoop_yarn_ResourceProfilesProto_descriptor = getDescriptor().getMessageTypes().get(10); 
internal_static_hadoop_yarn_ResourceProfilesProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ResourceProfilesProto_descriptor, new java.lang.String[] { "ResourceProfilesMap", }); internal_static_hadoop_yarn_NodeResourceMapProto_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_hadoop_yarn_NodeResourceMapProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_NodeResourceMapProto_descriptor, new java.lang.String[] { "NodeId", "ResourceOption", }); internal_static_hadoop_yarn_PriorityProto_descriptor = getDescriptor().getMessageTypes().get(12); internal_static_hadoop_yarn_PriorityProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_PriorityProto_descriptor, new java.lang.String[] { "Priority", }); internal_static_hadoop_yarn_ContainerProto_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_hadoop_yarn_ContainerProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ContainerProto_descriptor, new java.lang.String[] { "Id", "NodeId", "NodeHttpAddress", "Resource", "Priority", "ContainerToken", "ExecutionType", "AllocationRequestId", "Version", "AllocationTags", "ExposedPorts", }); internal_static_hadoop_yarn_ContainerReportProto_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_hadoop_yarn_ContainerReportProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ContainerReportProto_descriptor, new java.lang.String[] { "ContainerId", "Resource", "NodeId", "Priority", "CreationTime", "FinishTime", "DiagnosticsInfo", "LogUrl", "ContainerExitStatus", "ContainerState", "NodeHttpAddress", "ExecutionType", "ExposedPorts", }); internal_static_hadoop_yarn_URLProto_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_hadoop_yarn_URLProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_URLProto_descriptor, new java.lang.String[] { "Scheme", "Host", "Port", "File", "UserInfo", }); internal_static_hadoop_yarn_LocalResourceProto_descriptor = getDescriptor().getMessageTypes().get(16); internal_static_hadoop_yarn_LocalResourceProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_LocalResourceProto_descriptor, new java.lang.String[] { "Resource", "Size", "Timestamp", "Type", "Visibility", "Pattern", "ShouldBeUploadedToSharedCache", }); internal_static_hadoop_yarn_StringLongMapProto_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_hadoop_yarn_StringLongMapProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_StringLongMapProto_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_hadoop_yarn_StringFloatMapProto_descriptor = getDescriptor().getMessageTypes().get(18); internal_static_hadoop_yarn_StringFloatMapProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_StringFloatMapProto_descriptor, new java.lang.String[] { "Key", "Value", }); 
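    // StringLongMapProto, StringFloatMapProto and the similar Key/Value entries
    // wired below are explicit pair messages used in repeated fields to carry
    // map-shaped data (e.g. ContainerLaunchContextProto stores its environment
    // as repeated StringStringMapProto) rather than protobuf map<,> fields.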
internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_descriptor = getDescriptor().getMessageTypes().get(19); internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_descriptor, new java.lang.String[] { "NumUsedContainers", "NumReservedContainers", "UsedResources", "ReservedResources", "NeededResources", "MemorySeconds", "VcoreSeconds", "QueueUsagePercentage", "ClusterUsagePercentage", "PreemptedMemorySeconds", "PreemptedVcoreSeconds", "ApplicationResourceUsageMap", "ApplicationPreemptedResourceUsageMap", }); internal_static_hadoop_yarn_ApplicationReportProto_descriptor = getDescriptor().getMessageTypes().get(20); internal_static_hadoop_yarn_ApplicationReportProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ApplicationReportProto_descriptor, new java.lang.String[] { "ApplicationId", "User", "Queue", "Name", "Host", "RpcPort", "ClientToAmToken", "YarnApplicationState", "TrackingUrl", "Diagnostics", "StartTime", "FinishTime", "FinalApplicationStatus", "AppResourceUsage", "OriginalTrackingUrl", "CurrentApplicationAttemptId", "Progress", "ApplicationType", "AmRmToken", "ApplicationTags", "LogAggregationStatus", "UnmanagedApplication", "Priority", "AppNodeLabelExpression", "AmNodeLabelExpression", "AppTimeouts", "LaunchTime", "SubmitTime", "RmClusterId", }); internal_static_hadoop_yarn_AppTimeoutsMapProto_descriptor = getDescriptor().getMessageTypes().get(21); internal_static_hadoop_yarn_AppTimeoutsMapProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_AppTimeoutsMapProto_descriptor, new java.lang.String[] { "ApplicationTimeoutType", "ApplicationTimeout", }); internal_static_hadoop_yarn_ApplicationTimeoutProto_descriptor = getDescriptor().getMessageTypes().get(22); internal_static_hadoop_yarn_ApplicationTimeoutProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ApplicationTimeoutProto_descriptor, new java.lang.String[] { "ApplicationTimeoutType", "ExpireTime", "RemainingTime", }); internal_static_hadoop_yarn_ApplicationAttemptReportProto_descriptor = getDescriptor().getMessageTypes().get(23); internal_static_hadoop_yarn_ApplicationAttemptReportProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ApplicationAttemptReportProto_descriptor, new java.lang.String[] { "ApplicationAttemptId", "Host", "RpcPort", "TrackingUrl", "Diagnostics", "YarnApplicationAttemptState", "AmContainerId", "OriginalTrackingUrl", "StartTime", "FinishTime", }); internal_static_hadoop_yarn_NodeIdProto_descriptor = getDescriptor().getMessageTypes().get(24); internal_static_hadoop_yarn_NodeIdProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_NodeIdProto_descriptor, new java.lang.String[] { "Host", "Port", }); internal_static_hadoop_yarn_NodeReportProto_descriptor = getDescriptor().getMessageTypes().get(25); internal_static_hadoop_yarn_NodeReportProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_NodeReportProto_descriptor, new java.lang.String[] { 
"NodeId", "HttpAddress", "RackName", "Used", "Capability", "NumContainers", "NodeState", "HealthReport", "LastHealthReportTime", "NodeLabels", "ContainersUtilization", "NodeUtilization", "DecommissioningTimeout", "NodeUpdateType", "NodeAttributes", }); internal_static_hadoop_yarn_NodeIdToLabelsProto_descriptor = getDescriptor().getMessageTypes().get(26); internal_static_hadoop_yarn_NodeIdToLabelsProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_NodeIdToLabelsProto_descriptor, new java.lang.String[] { "NodeId", "NodeLabels", }); internal_static_hadoop_yarn_LabelsToNodeIdsProto_descriptor = getDescriptor().getMessageTypes().get(27); internal_static_hadoop_yarn_LabelsToNodeIdsProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_LabelsToNodeIdsProto_descriptor, new java.lang.String[] { "NodeLabels", "NodeId", }); internal_static_hadoop_yarn_NodeLabelProto_descriptor = getDescriptor().getMessageTypes().get(28); internal_static_hadoop_yarn_NodeLabelProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_NodeLabelProto_descriptor, new java.lang.String[] { "Name", "IsExclusive", }); internal_static_hadoop_yarn_NodeAttributeKeyProto_descriptor = getDescriptor().getMessageTypes().get(29); internal_static_hadoop_yarn_NodeAttributeKeyProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_NodeAttributeKeyProto_descriptor, new java.lang.String[] { "AttributePrefix", "AttributeName", }); internal_static_hadoop_yarn_NodeAttributeProto_descriptor = getDescriptor().getMessageTypes().get(30); internal_static_hadoop_yarn_NodeAttributeProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_NodeAttributeProto_descriptor, new java.lang.String[] { "AttributeKey", "AttributeType", "AttributeValue", }); internal_static_hadoop_yarn_NodeAttributeInfoProto_descriptor = getDescriptor().getMessageTypes().get(31); internal_static_hadoop_yarn_NodeAttributeInfoProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_NodeAttributeInfoProto_descriptor, new java.lang.String[] { "AttributeKey", "AttributeType", }); internal_static_hadoop_yarn_NodeToAttributeValueProto_descriptor = getDescriptor().getMessageTypes().get(32); internal_static_hadoop_yarn_NodeToAttributeValueProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_NodeToAttributeValueProto_descriptor, new java.lang.String[] { "Hostname", "AttributeValue", }); internal_static_hadoop_yarn_AttributeToNodesProto_descriptor = getDescriptor().getMessageTypes().get(33); internal_static_hadoop_yarn_AttributeToNodesProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_AttributeToNodesProto_descriptor, new java.lang.String[] { "NodeAttribute", "NodeValueMap", }); internal_static_hadoop_yarn_NodeToAttributesProto_descriptor = getDescriptor().getMessageTypes().get(34); internal_static_hadoop_yarn_NodeToAttributesProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_hadoop_yarn_NodeToAttributesProto_descriptor, new java.lang.String[] { "Node", "NodeAttributes", }); internal_static_hadoop_yarn_DeregisterSubClustersProto_descriptor = getDescriptor().getMessageTypes().get(35); internal_static_hadoop_yarn_DeregisterSubClustersProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_DeregisterSubClustersProto_descriptor, new java.lang.String[] { "SubClusterId", "DeregisterState", "LastHeartBeatTime", "Information", "SubClusterState", }); internal_static_hadoop_yarn_FederationQueueWeightProto_descriptor = getDescriptor().getMessageTypes().get(36); internal_static_hadoop_yarn_FederationQueueWeightProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_FederationQueueWeightProto_descriptor, new java.lang.String[] { "RouterWeight", "AmrmWeight", "HeadRoomAlpha", "Queue", "PolicyManagerClassName", }); internal_static_hadoop_yarn_FederationSubClusterProto_descriptor = getDescriptor().getMessageTypes().get(37); internal_static_hadoop_yarn_FederationSubClusterProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_FederationSubClusterProto_descriptor, new java.lang.String[] { "SubClusterId", "LastHeartBeatTime", "SubClusterState", }); internal_static_hadoop_yarn_ResourceRequestProto_descriptor = getDescriptor().getMessageTypes().get(38); internal_static_hadoop_yarn_ResourceRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ResourceRequestProto_descriptor, new java.lang.String[] { "Priority", "ResourceName", "Capability", "NumContainers", "RelaxLocality", "NodeLabelExpression", "ExecutionTypeRequest", "AllocationRequestId", }); internal_static_hadoop_yarn_ExecutionTypeRequestProto_descriptor = getDescriptor().getMessageTypes().get(39); internal_static_hadoop_yarn_ExecutionTypeRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ExecutionTypeRequestProto_descriptor, new java.lang.String[] { "ExecutionType", "EnforceExecutionType", }); internal_static_hadoop_yarn_SchedulingRequestProto_descriptor = getDescriptor().getMessageTypes().get(40); internal_static_hadoop_yarn_SchedulingRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_SchedulingRequestProto_descriptor, new java.lang.String[] { "AllocationRequestId", "Priority", "ExecutionType", "AllocationTags", "ResourceSizing", "PlacementConstraint", }); internal_static_hadoop_yarn_ResourceSizingProto_descriptor = getDescriptor().getMessageTypes().get(41); internal_static_hadoop_yarn_ResourceSizingProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ResourceSizingProto_descriptor, new java.lang.String[] { "NumAllocations", "Resources", }); internal_static_hadoop_yarn_RejectedSchedulingRequestProto_descriptor = getDescriptor().getMessageTypes().get(42); internal_static_hadoop_yarn_RejectedSchedulingRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_RejectedSchedulingRequestProto_descriptor, new java.lang.String[] { "Reason", 
"Request", }); internal_static_hadoop_yarn_PreemptionMessageProto_descriptor = getDescriptor().getMessageTypes().get(43); internal_static_hadoop_yarn_PreemptionMessageProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_PreemptionMessageProto_descriptor, new java.lang.String[] { "StrictContract", "Contract", }); internal_static_hadoop_yarn_StrictPreemptionContractProto_descriptor = getDescriptor().getMessageTypes().get(44); internal_static_hadoop_yarn_StrictPreemptionContractProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_StrictPreemptionContractProto_descriptor, new java.lang.String[] { "Container", }); internal_static_hadoop_yarn_PreemptionContractProto_descriptor = getDescriptor().getMessageTypes().get(45); internal_static_hadoop_yarn_PreemptionContractProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_PreemptionContractProto_descriptor, new java.lang.String[] { "Resource", "Container", }); internal_static_hadoop_yarn_PreemptionContainerProto_descriptor = getDescriptor().getMessageTypes().get(46); internal_static_hadoop_yarn_PreemptionContainerProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_PreemptionContainerProto_descriptor, new java.lang.String[] { "Id", }); internal_static_hadoop_yarn_PreemptionResourceRequestProto_descriptor = getDescriptor().getMessageTypes().get(47); internal_static_hadoop_yarn_PreemptionResourceRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_PreemptionResourceRequestProto_descriptor, new java.lang.String[] { "Resource", }); internal_static_hadoop_yarn_ResourceBlacklistRequestProto_descriptor = getDescriptor().getMessageTypes().get(48); internal_static_hadoop_yarn_ResourceBlacklistRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ResourceBlacklistRequestProto_descriptor, new java.lang.String[] { "BlacklistAdditions", "BlacklistRemovals", }); internal_static_hadoop_yarn_ApplicationSubmissionContextProto_descriptor = getDescriptor().getMessageTypes().get(49); internal_static_hadoop_yarn_ApplicationSubmissionContextProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ApplicationSubmissionContextProto_descriptor, new java.lang.String[] { "ApplicationId", "ApplicationName", "Queue", "Priority", "AmContainerSpec", "CancelTokensWhenComplete", "UnmanagedAm", "MaxAppAttempts", "Resource", "ApplicationType", "KeepContainersAcrossApplicationAttempts", "ApplicationTags", "AttemptFailuresValidityInterval", "LogAggregationContext", "ReservationId", "NodeLabelExpression", "AmContainerResourceRequest", "ApplicationTimeouts", "ApplicationSchedulingProperties", }); internal_static_hadoop_yarn_ApplicationTimeoutMapProto_descriptor = getDescriptor().getMessageTypes().get(50); internal_static_hadoop_yarn_ApplicationTimeoutMapProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ApplicationTimeoutMapProto_descriptor, new java.lang.String[] { "ApplicationTimeoutType", "Timeout", }); 
internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_descriptor = getDescriptor().getMessageTypes().get(51); internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_descriptor, new java.lang.String[] { "ApplicationTimeoutType", "ExpireTime", }); internal_static_hadoop_yarn_LogAggregationContextProto_descriptor = getDescriptor().getMessageTypes().get(52); internal_static_hadoop_yarn_LogAggregationContextProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_LogAggregationContextProto_descriptor, new java.lang.String[] { "IncludePattern", "ExcludePattern", "RolledLogsIncludePattern", "RolledLogsExcludePattern", "LogAggregationPolicyClassName", "LogAggregationPolicyParameters", }); internal_static_hadoop_yarn_ApplicationACLMapProto_descriptor = getDescriptor().getMessageTypes().get(53); internal_static_hadoop_yarn_ApplicationACLMapProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ApplicationACLMapProto_descriptor, new java.lang.String[] { "AccessType", "Acl", }); internal_static_hadoop_yarn_YarnClusterMetricsProto_descriptor = getDescriptor().getMessageTypes().get(54); internal_static_hadoop_yarn_YarnClusterMetricsProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_YarnClusterMetricsProto_descriptor, new java.lang.String[] { "NumNodeManagers", "NumDecommissionedNms", "NumActiveNms", "NumLostNms", "NumUnhealthyNms", "NumRebootedNms", "NumDecommissioningNms", "NumShutdownNms", }); internal_static_hadoop_yarn_QueueStatisticsProto_descriptor = getDescriptor().getMessageTypes().get(55); internal_static_hadoop_yarn_QueueStatisticsProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_QueueStatisticsProto_descriptor, new java.lang.String[] { "NumAppsSubmitted", "NumAppsRunning", "NumAppsPending", "NumAppsCompleted", "NumAppsKilled", "NumAppsFailed", "NumActiveUsers", "AvailableMemoryMB", "AllocatedMemoryMB", "PendingMemoryMB", "ReservedMemoryMB", "AvailableVCores", "AllocatedVCores", "PendingVCores", "ReservedVCores", "AllocatedContainers", "PendingContainers", "ReservedContainers", }); internal_static_hadoop_yarn_QueueInfoProto_descriptor = getDescriptor().getMessageTypes().get(56); internal_static_hadoop_yarn_QueueInfoProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_QueueInfoProto_descriptor, new java.lang.String[] { "QueueName", "Capacity", "MaximumCapacity", "CurrentCapacity", "State", "ChildQueues", "Applications", "AccessibleNodeLabels", "DefaultNodeLabelExpression", "QueueStatistics", "PreemptionDisabled", "QueueConfigurationsMap", "IntraQueuePreemptionDisabled", "Weight", "QueuePath", "MaxParallelApps", "SchedulerType", "MinResourceVCore", "MinResourceMemory", "MaxResourceVCore", "MaxResourceMemory", "ReservedResourceVCore", "ReservedResourceMemory", "SteadyFairShareVCore", "SteadyFairShareMemory", "SubClusterId", "MaxRunningApp", }); internal_static_hadoop_yarn_QueueConfigurationsProto_descriptor = getDescriptor().getMessageTypes().get(57); 
internal_static_hadoop_yarn_QueueConfigurationsProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_QueueConfigurationsProto_descriptor, new java.lang.String[] { "Capacity", "AbsoluteCapacity", "MaxCapacity", "AbsoluteMaxCapacity", "MaxAMPercentage", "EffectiveMinCapacity", "EffectiveMaxCapacity", "ConfiguredMinCapacity", "ConfiguredMaxCapacity", }); internal_static_hadoop_yarn_QueueConfigurationsMapProto_descriptor = getDescriptor().getMessageTypes().get(58); internal_static_hadoop_yarn_QueueConfigurationsMapProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_QueueConfigurationsMapProto_descriptor, new java.lang.String[] { "PartitionName", "QueueConfigurations", }); internal_static_hadoop_yarn_QueueUserACLInfoProto_descriptor = getDescriptor().getMessageTypes().get(59); internal_static_hadoop_yarn_QueueUserACLInfoProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_QueueUserACLInfoProto_descriptor, new java.lang.String[] { "QueueName", "UserAcls", }); internal_static_hadoop_yarn_PlacementConstraintProto_descriptor = getDescriptor().getMessageTypes().get(60); internal_static_hadoop_yarn_PlacementConstraintProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_PlacementConstraintProto_descriptor, new java.lang.String[] { "SimpleConstraint", "CompositeConstraint", }); internal_static_hadoop_yarn_SimplePlacementConstraintProto_descriptor = getDescriptor().getMessageTypes().get(61); internal_static_hadoop_yarn_SimplePlacementConstraintProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_SimplePlacementConstraintProto_descriptor, new java.lang.String[] { "Scope", "TargetExpressions", "MinCardinality", "MaxCardinality", "AttributeOpCode", }); internal_static_hadoop_yarn_PlacementConstraintTargetProto_descriptor = getDescriptor().getMessageTypes().get(62); internal_static_hadoop_yarn_PlacementConstraintTargetProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_PlacementConstraintTargetProto_descriptor, new java.lang.String[] { "TargetType", "TargetKey", "TargetValues", }); internal_static_hadoop_yarn_TimedPlacementConstraintProto_descriptor = getDescriptor().getMessageTypes().get(63); internal_static_hadoop_yarn_TimedPlacementConstraintProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_TimedPlacementConstraintProto_descriptor, new java.lang.String[] { "PlacementConstraint", "SchedulingDelay", "DelayUnit", }); internal_static_hadoop_yarn_CompositePlacementConstraintProto_descriptor = getDescriptor().getMessageTypes().get(64); internal_static_hadoop_yarn_CompositePlacementConstraintProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_CompositePlacementConstraintProto_descriptor, new java.lang.String[] { "CompositeType", "ChildConstraints", "TimedChildConstraints", }); internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_descriptor = getDescriptor().getMessageTypes().get(65); 
internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_descriptor, new java.lang.String[] { "AllocationTags", "PlacementConstraint", }); internal_static_hadoop_yarn_ReservationIdProto_descriptor = getDescriptor().getMessageTypes().get(66); internal_static_hadoop_yarn_ReservationIdProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ReservationIdProto_descriptor, new java.lang.String[] { "Id", "ClusterTimestamp", }); internal_static_hadoop_yarn_ReservationRequestProto_descriptor = getDescriptor().getMessageTypes().get(67); internal_static_hadoop_yarn_ReservationRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ReservationRequestProto_descriptor, new java.lang.String[] { "Capability", "NumContainers", "Concurrency", "Duration", }); internal_static_hadoop_yarn_ReservationRequestsProto_descriptor = getDescriptor().getMessageTypes().get(68); internal_static_hadoop_yarn_ReservationRequestsProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ReservationRequestsProto_descriptor, new java.lang.String[] { "ReservationResources", "Interpreter", }); internal_static_hadoop_yarn_ReservationDefinitionProto_descriptor = getDescriptor().getMessageTypes().get(69); internal_static_hadoop_yarn_ReservationDefinitionProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ReservationDefinitionProto_descriptor, new java.lang.String[] { "ReservationRequests", "Arrival", "Deadline", "ReservationName", "RecurrenceExpression", "Priority", }); internal_static_hadoop_yarn_ResourceAllocationRequestProto_descriptor = getDescriptor().getMessageTypes().get(70); internal_static_hadoop_yarn_ResourceAllocationRequestProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ResourceAllocationRequestProto_descriptor, new java.lang.String[] { "StartTime", "EndTime", "Resource", }); internal_static_hadoop_yarn_ReservationAllocationStateProto_descriptor = getDescriptor().getMessageTypes().get(71); internal_static_hadoop_yarn_ReservationAllocationStateProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ReservationAllocationStateProto_descriptor, new java.lang.String[] { "ReservationDefinition", "AllocationRequests", "StartTime", "EndTime", "User", "ContainsGangs", "AcceptanceTime", "ReservationId", }); internal_static_hadoop_yarn_ContainerLaunchContextProto_descriptor = getDescriptor().getMessageTypes().get(72); internal_static_hadoop_yarn_ContainerLaunchContextProto_fieldAccessorTable = new org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hadoop_yarn_ContainerLaunchContextProto_descriptor, new java.lang.String[] { "LocalResources", "Tokens", "ServiceData", "Environment", "Command", "ApplicationACLs", "ContainerRetryContext", "TokensConf", }); internal_static_hadoop_yarn_ContainerStatusProto_descriptor = getDescriptor().getMessageTypes().get(73); internal_static_hadoop_yarn_ContainerStatusProto_fieldAccessorTable = new 
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ContainerStatusProto_descriptor,
        new java.lang.String[] { "ContainerId", "State", "Diagnostics", "ExitStatus", "Capability", "ExecutionType", "ContainerAttributes", "ContainerSubState", });
    internal_static_hadoop_yarn_ContainerRetryContextProto_descriptor =
      getDescriptor().getMessageTypes().get(74);
    internal_static_hadoop_yarn_ContainerRetryContextProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ContainerRetryContextProto_descriptor,
        new java.lang.String[] { "RetryPolicy", "ErrorCodes", "MaxRetries", "RetryInterval", "FailuresValidityInterval", });
    internal_static_hadoop_yarn_StringLocalResourceMapProto_descriptor =
      getDescriptor().getMessageTypes().get(75);
    internal_static_hadoop_yarn_StringLocalResourceMapProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_StringLocalResourceMapProto_descriptor,
        new java.lang.String[] { "Key", "Value", });
    internal_static_hadoop_yarn_StringStringMapProto_descriptor =
      getDescriptor().getMessageTypes().get(76);
    internal_static_hadoop_yarn_StringStringMapProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_StringStringMapProto_descriptor,
        new java.lang.String[] { "Key", "Value", });
    internal_static_hadoop_yarn_StringBytesMapProto_descriptor =
      getDescriptor().getMessageTypes().get(77);
    internal_static_hadoop_yarn_StringBytesMapProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_StringBytesMapProto_descriptor,
        new java.lang.String[] { "Key", "Value", });
    internal_static_hadoop_yarn_CollectorInfoProto_descriptor =
      getDescriptor().getMessageTypes().get(78);
    internal_static_hadoop_yarn_CollectorInfoProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_CollectorInfoProto_descriptor,
        new java.lang.String[] { "CollectorAddr", "CollectorToken", });
    org.apache.hadoop.security.proto.SecurityProtos.getDescriptor();
  }

  // @@protoc_insertion_point(outer_class_scope)
}
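
A minimal usage sketch, not part of the generated file: the demo class name and the sample values are invented for illustration. It assumes the nested message classes generated earlier in this file (e.g. YarnProtos.ResourceProto, whose fields are memory, virtual_cores and resource_value_map per the wiring above) and Hadoop's relocated protobuf runtime (org.apache.hadoop.thirdparty.protobuf) on the classpath.

import org.apache.hadoop.thirdparty.protobuf.Descriptors;
import org.apache.hadoop.yarn.proto.YarnProtos;

public final class YarnProtosDescriptorDemo {
  public static void main(String[] args) throws Exception {
    // List every top-level message compiled from yarn_protos.proto, in the
    // same declaration order used by the index-based wiring above.
    Descriptors.FileDescriptor file = YarnProtos.getDescriptor();
    for (Descriptors.Descriptor message : file.getMessageTypes()) {
      System.out.println(message.getIndex() + "\t" + message.getFullName());
    }

    // Round-trip a ResourceProto through its wire format.
    YarnProtos.ResourceProto resource = YarnProtos.ResourceProto.newBuilder()
        .setMemory(4096)      // proto field "memory" (MB in YARN's convention)
        .setVirtualCores(2)   // proto field "virtual_cores"
        .build();
    YarnProtos.ResourceProto parsed =
        YarnProtos.ResourceProto.parseFrom(resource.toByteArray());
    System.out.println("memory=" + parsed.getMemory()
        + " vcores=" + parsed.getVirtualCores());
  }
}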